gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.wso2.carbon.transport.http.netty.listener;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.websocketx.BinaryWebSocketFrame;
import io.netty.handler.codec.http.websocketx.CloseWebSocketFrame;
import io.netty.handler.codec.http.websocketx.PingWebSocketFrame;
import io.netty.handler.codec.http.websocketx.PongWebSocketFrame;
import io.netty.handler.codec.http.websocketx.TextWebSocketFrame;
import io.netty.handler.codec.http.websocketx.WebSocketFrame;
import io.netty.handler.timeout.IdleStateEvent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wso2.carbon.transport.http.netty.common.Constants;
import org.wso2.carbon.transport.http.netty.contract.ServerConnectorException;
import org.wso2.carbon.transport.http.netty.contract.ServerConnectorFuture;
import org.wso2.carbon.transport.http.netty.contract.websocket.WebSocketBinaryMessage;
import org.wso2.carbon.transport.http.netty.contract.websocket.WebSocketCloseMessage;
import org.wso2.carbon.transport.http.netty.contract.websocket.WebSocketControlMessage;
import org.wso2.carbon.transport.http.netty.contract.websocket.WebSocketControlSignal;
import org.wso2.carbon.transport.http.netty.contract.websocket.WebSocketTextMessage;
import org.wso2.carbon.transport.http.netty.contractimpl.HttpWsServerConnectorFuture;
import org.wso2.carbon.transport.http.netty.contractimpl.websocket.WebSocketMessageImpl;
import org.wso2.carbon.transport.http.netty.contractimpl.websocket.message.WebSocketBinaryMessageImpl;
import org.wso2.carbon.transport.http.netty.contractimpl.websocket.message.WebSocketCloseMessageImpl;
import org.wso2.carbon.transport.http.netty.contractimpl.websocket.message.WebSocketControlMessageImpl;
import org.wso2.carbon.transport.http.netty.contractimpl.websocket.message.WebSocketTextMessageImpl;
import org.wso2.carbon.transport.http.netty.exception.UnknownWebSocketFrameTypeException;
import org.wso2.carbon.transport.http.netty.internal.websocket.WebSocketSessionImpl;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.util.Map;
/**
* This class handles all kinds of WebSocketFrames
* after connection is upgraded from HTTP to WebSocket.
*/
public class WebSocketSourceHandler extends SourceHandler {
private static final Logger logger = LoggerFactory.getLogger(WebSocketSourceHandler.class);
private final String target;
private final ChannelHandlerContext ctx;
private final boolean isSecured;
private final ServerConnectorFuture connectorFuture;
private final WebSocketSessionImpl channelSession;
private final Map<String, String> headers;
private final String interfaceId;
private String subProtocol = null;
/**
* @param connectorFuture {@link ServerConnectorFuture} to notify messages to application.
* @param isSecured indication of whether the connection is secured or not.
* @param channelSession session relates to the channel.
* @param httpRequest {@link HttpRequest} which contains the details of WebSocket Upgrade.
* @param headers Headers obtained from HTTP WebSocket upgrade request.
* @param ctx {@link ChannelHandlerContext} of WebSocket connection.
* @param interfaceId given ID for the socket interface.
* @throws Exception if any error occurred during construction of {@link WebSocketSourceHandler}.
*/
public WebSocketSourceHandler(ServerConnectorFuture connectorFuture, boolean isSecured,
WebSocketSessionImpl channelSession, HttpRequest httpRequest,
Map<String, String> headers, ChannelHandlerContext ctx, String interfaceId)
throws Exception {
super(new HttpWsServerConnectorFuture(), interfaceId);
this.connectorFuture = connectorFuture;
this.isSecured = isSecured;
this.channelSession = channelSession;
this.ctx = ctx;
this.interfaceId = interfaceId;
this.target = httpRequest.uri();
this.headers = headers;
}
/**
* Retrieve server session of this source handler.
*
* @return the server session of this source handler.
*/
public WebSocketSessionImpl getChannelSession() {
return channelSession;
}
/**
* Set if there is any negotiated sub protocol.
* @param negotiatedSubProtocol negotiated sub protocol for a given connection.
*/
public void setNegotiatedSubProtocol(String negotiatedSubProtocol) {
this.subProtocol = negotiatedSubProtocol;
}
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
if (evt instanceof IdleStateEvent) {
IdleStateEvent idleStateEvent = (IdleStateEvent) evt;
if (idleStateEvent.state() == IdleStateEvent.ALL_IDLE_STATE_EVENT.state()) {
notifyIdleTimeout();
}
}
}
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
if (channelSession.isOpen()) {
channelSession.setIsOpen(false);
int statusCode = 1001; // Client is going away.
String reasonText = "Client is going away";
notifyCloseMessage(statusCode, reasonText);
}
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg)
throws UnknownWebSocketFrameTypeException, ServerConnectorException {
if (!(msg instanceof WebSocketFrame)) {
logger.error("Expecting WebSocketFrame. Unknown type.");
throw new UnknownWebSocketFrameTypeException("Expecting WebSocketFrame. Unknown type.");
}
if (msg instanceof TextWebSocketFrame) {
notifyTextMessage((TextWebSocketFrame) msg);
} else if (msg instanceof BinaryWebSocketFrame) {
notifyBinaryMessage((BinaryWebSocketFrame) msg);
} else if (msg instanceof CloseWebSocketFrame) {
notifyCloseMessage((CloseWebSocketFrame) msg);
} else if (msg instanceof PingWebSocketFrame) {
notifyPingMessage((PingWebSocketFrame) msg);
} else if (msg instanceof PongWebSocketFrame) {
notifyPongMessage((PongWebSocketFrame) msg);
}
}
private void notifyTextMessage(TextWebSocketFrame textWebSocketFrame) throws ServerConnectorException {
String text = textWebSocketFrame.text();
boolean isFinalFragment = textWebSocketFrame.isFinalFragment();
WebSocketMessageImpl webSocketTextMessage =
new WebSocketTextMessageImpl(text, isFinalFragment);
webSocketTextMessage = setupCommonProperties(webSocketTextMessage);
connectorFuture.notifyWSListener((WebSocketTextMessage) webSocketTextMessage);
}
private void notifyBinaryMessage(BinaryWebSocketFrame binaryWebSocketFrame) throws ServerConnectorException {
ByteBuf byteBuf = binaryWebSocketFrame.content();
boolean finalFragment = binaryWebSocketFrame.isFinalFragment();
ByteBuffer byteBuffer = byteBuf.nioBuffer();
WebSocketMessageImpl webSocketBinaryMessage =
new WebSocketBinaryMessageImpl(byteBuffer, finalFragment);
webSocketBinaryMessage = setupCommonProperties(webSocketBinaryMessage);
connectorFuture.notifyWSListener((WebSocketBinaryMessage) webSocketBinaryMessage);
}
private void notifyCloseMessage(CloseWebSocketFrame closeWebSocketFrame) throws ServerConnectorException {
String reasonText = closeWebSocketFrame.reasonText();
int statusCode = closeWebSocketFrame.statusCode();
ctx.channel().close();
channelSession.setIsOpen(false);
WebSocketMessageImpl webSocketCloseMessage =
new WebSocketCloseMessageImpl(statusCode, reasonText);
webSocketCloseMessage = setupCommonProperties(webSocketCloseMessage);
connectorFuture.notifyWSListener((WebSocketCloseMessage) webSocketCloseMessage);
}
private void notifyCloseMessage(int statusCode, String reasonText) throws ServerConnectorException {
ctx.channel().close();
channelSession.setIsOpen(false);
WebSocketMessageImpl webSocketCloseMessage =
new WebSocketCloseMessageImpl(statusCode, reasonText);
webSocketCloseMessage = setupCommonProperties(webSocketCloseMessage);
connectorFuture.notifyWSListener((WebSocketCloseMessage) webSocketCloseMessage);
}
private void notifyPingMessage(PingWebSocketFrame pingWebSocketFrame) throws ServerConnectorException {
//Control message for WebSocket is Ping Message
ByteBuf byteBuf = pingWebSocketFrame.content();
ByteBuffer byteBuffer = byteBuf.nioBuffer();
WebSocketMessageImpl webSocketControlMessage =
new WebSocketControlMessageImpl(WebSocketControlSignal.PING, byteBuffer);
webSocketControlMessage = setupCommonProperties(webSocketControlMessage);
connectorFuture.notifyWSListener((WebSocketControlMessage) webSocketControlMessage);
}
private void notifyPongMessage(PongWebSocketFrame pongWebSocketFrame) throws ServerConnectorException {
//Control message for WebSocket is Pong Message
ByteBuf byteBuf = pongWebSocketFrame.content();
ByteBuffer byteBuffer = byteBuf.nioBuffer();
WebSocketMessageImpl webSocketControlMessage =
new WebSocketControlMessageImpl(WebSocketControlSignal.PONG, byteBuffer);
webSocketControlMessage = setupCommonProperties(webSocketControlMessage);
connectorFuture.notifyWSListener((WebSocketControlMessage) webSocketControlMessage);
}
private void notifyIdleTimeout() throws ServerConnectorException {
WebSocketMessageImpl websocketControlMessage =
new WebSocketControlMessageImpl(WebSocketControlSignal.IDLE_TIMEOUT, null);
websocketControlMessage = setupCommonProperties(websocketControlMessage);
connectorFuture.notifyWSIdleTimeout((WebSocketControlMessage) websocketControlMessage);
}
private WebSocketMessageImpl setupCommonProperties(WebSocketMessageImpl webSocketMessage) {
webSocketMessage.setSubProtocol(subProtocol);
webSocketMessage.setTarget(target);
webSocketMessage.setListenerInterface(interfaceId);
webSocketMessage.setIsConnectionSecured(isSecured);
webSocketMessage.setIsServerMessage(true);
webSocketMessage.setChannelSession(channelSession);
webSocketMessage.setHeaders(headers);
webSocketMessage.setSessionlID(channelSession.getId());
webSocketMessage.setProperty(Constants.SRC_HANDLER, this);
webSocketMessage.setProperty(org.wso2.carbon.messaging.Constants.LISTENER_PORT,
((InetSocketAddress) ctx.channel().localAddress()).getPort());
webSocketMessage.setProperty(Constants.LOCAL_ADDRESS, ctx.channel().localAddress());
webSocketMessage.setProperty(
Constants.LOCAL_NAME, ((InetSocketAddress) ctx.channel().localAddress()).getHostName());
return webSocketMessage;
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
ctx.channel().writeAndFlush(new CloseWebSocketFrame(1011,
"Encountered an unexpected condition"));
ctx.close();
connectorFuture.notifyWSListener(cause);
}
}
| |
package apple.mlcompute;
import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSData;
import apple.foundation.NSDictionary;
import apple.foundation.NSError;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCBlock;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;
/**
* MLCTrainingGraph
* <p>
* A training graph created from one or more MLCGraph objects
* plus additional layers added directly to the training graph.
*/
@Generated
@Library("MLCompute")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class MLCTrainingGraph extends MLCGraph {
static {
NatJ.register();
}
@Generated
protected MLCTrainingGraph(Pointer peer) {
super(peer);
}
@Generated
@Selector("accessInstanceVariablesDirectly")
public static native boolean accessInstanceVariablesDirectly();
/**
* Add the list of inputs to the training graph
*
* @param inputs The inputs
* @param lossLabels The loss label inputs
* @return A boolean indicating success or failure
*/
@Generated
@Selector("addInputs:lossLabels:")
public native boolean addInputsLossLabels(NSDictionary<String, ? extends MLCTensor> inputs,
NSDictionary<String, ? extends MLCTensor> lossLabels);
/**
* Add the list of inputs to the training graph
* <p>
* Each input, loss label or label weights tensor is identified by a NSString.
* When the training graph is executed, this NSString is used to identify which data object
* should be as input data for each tensor whose device memory needs to be updated
* before the graph is executed.
*
* @param inputs The inputs
* @param lossLabels The loss label inputs
* @param lossLabelWeights The loss label weights
* @return A boolean indicating success or failure
*/
@Generated
@Selector("addInputs:lossLabels:lossLabelWeights:")
public native boolean addInputsLossLabelsLossLabelWeights(NSDictionary<String, ? extends MLCTensor> inputs,
NSDictionary<String, ? extends MLCTensor> lossLabels,
NSDictionary<String, ? extends MLCTensor> lossLabelWeights);
/**
* Add the list of outputs to the training graph
*
* @param outputs The outputs
* @return A boolean indicating success or failure
*/
@Generated
@Selector("addOutputs:")
public native boolean addOutputs(NSDictionary<String, ? extends MLCTensor> outputs);
@Generated
@Owned
@Selector("alloc")
public static native MLCTrainingGraph alloc();
@Owned
@Generated
@Selector("allocWithZone:")
public static native MLCTrainingGraph allocWithZone(VoidPtr zone);
/**
* Allocate an entry for a user specified gradient for a tensor
*
* @param tensor A result tensor produced by a layer in the training graph
* that is input to some user specified code and will need to
* provide a user gradient during the gradient pass.
* @return A gradient tensor
*/
@Generated
@Selector("allocateUserGradientForTensor:")
public native MLCTensor allocateUserGradientForTensor(MLCTensor tensor);
@Generated
@Selector("automaticallyNotifiesObserversForKey:")
public static native boolean automaticallyNotifiesObserversForKey(String key);
/**
* Associates the given optimizer data and device data buffers with the tensor.
* Returns true if the data is successfully associated with the tensor and copied to the device.
* <p>
* The caller must guarantee the lifetime of the underlying memory of \p data for the entirety of the tensor's
* lifetime. The \p deviceData buffers are allocated by MLCompute. This method must be called
* before executeOptimizerUpdateWithOptions or executeWithInputsData is called for the training graph.
* We recommend using this method instead of using [MLCTensor bindOptimizerData] especially if the
* optimizer update is being called multiple times for each batch.
*
* @param data The optimizer data to be associated with the tensor
* @param deviceData The optimizer device data to be associated with the tensor
* @param tensor The tensor
* @return A Boolean value indicating whether the data is successfully associated with the tensor .
*/
@Generated
@Selector("bindOptimizerData:deviceData:withTensor:")
public native boolean bindOptimizerDataDeviceDataWithTensor(NSArray<? extends MLCTensorData> data,
NSArray<? extends MLCTensorOptimizerDeviceData> deviceData, MLCTensor tensor);
@Generated
@Selector("cancelPreviousPerformRequestsWithTarget:")
public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);
@Generated
@Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
@Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
@Mapped(ObjCObjectMapper.class) Object anArgument);
@Generated
@Selector("classFallbacksForKeyedArchiver")
public static native NSArray<String> classFallbacksForKeyedArchiver();
@Generated
@Selector("classForKeyedUnarchiver")
public static native Class classForKeyedUnarchiver();
/**
* Compile the optimizer to be used with a training graph.
* <p>
* Typically the optimizer to be used with a training graph is specifed when the training graph is created using
* graphWithGraphObjects:lossLayer:optimizer. The optimizer will be compiled in when compileWithOptions:device
* is called if an optimizer is specified with the training graph. In the case where the optimizer to be used is not known
* when the graph is created or compiled, this method can be used to associate and compile a training graph with an optimizer.
*
* @param optimizer The MLCOptimizer object
* @return A boolean indicating success or failure
*/
@Generated
@Selector("compileOptimizer:")
public native boolean compileOptimizer(MLCOptimizer optimizer);
/**
* Compile the training graph for a device.
*
* @param options The compiler options to use when compiling the training graph
* @param device The MLCDevice object
* @return A boolean indicating success or failure
*/
@Generated
@Selector("compileWithOptions:device:")
public native boolean compileWithOptionsDevice(long options, MLCDevice device);
/**
* Compile the training graph for a device.
* <p>
* Specifying the list of constant tensors when we compile the graph allows MLCompute to perform additional optimizations at compile time.
*
* @param options The compiler options to use when compiling the training graph
* @param device The MLCDevice object
* @param inputTensors The list of input tensors that are constants
* @param inputTensorsData The tensor data to be used with these constant input tensors
* @return A boolean indicating success or failure
*/
@Generated
@Selector("compileWithOptions:device:inputTensors:inputTensorsData:")
public native boolean compileWithOptionsDeviceInputTensorsInputTensorsData(long options, MLCDevice device,
NSDictionary<String, ? extends MLCTensor> inputTensors,
NSDictionary<String, ? extends MLCTensorData> inputTensorsData);
@Generated
@Selector("debugDescription")
public static native String debugDescription_static();
@Generated
@Selector("description")
public static native String description_static();
/**
* [@property] The device memory size used by the training graph
* <p>
* Returns the total size in bytes of device memory used for all intermediate tensors
* for forward, gradient passes and optimizer update for all layers in the training graph.
* We recommend executing an iteration before checking the device memory size as
* the buffers needed get allocated when the corresponding pass such as gradient,
* optimizer update is executed.
*
* @return A NSUInteger value
*/
@Generated
@Selector("deviceMemorySize")
@NUInt
public native long deviceMemorySize();
/**
* Execute the forward pass of the training graph
*
* @param batchSize The batch size to use. For a graph where batch size changes between layers this value must be 0.
* @param options The execution options
* @param completionHandler The completion handler
* @return A boolean indicating success or failure
*/
@Generated
@Selector("executeForwardWithBatchSize:options:completionHandler:")
public native boolean executeForwardWithBatchSizeOptionsCompletionHandler(@NUInt long batchSize, long options,
@ObjCBlock(name = "call_executeForwardWithBatchSizeOptionsCompletionHandler") Block_executeForwardWithBatchSizeOptionsCompletionHandler completionHandler);
@Runtime(ObjCRuntime.class)
@Generated
public interface Block_executeForwardWithBatchSizeOptionsCompletionHandler {
@Generated
void call_executeForwardWithBatchSizeOptionsCompletionHandler(MLCTensor resultTensor, NSError error,
double executionTime);
}
/**
* Execute the forward pass for the training graph
*
* @param batchSize The batch size to use. For a graph where batch size changes between layers this value must be 0.
* @param options The execution options
* @param outputsData The data objects to use for outputs
* @param completionHandler The completion handler
* @return A boolean indicating success or failure
*/
@Generated
@Selector("executeForwardWithBatchSize:options:outputsData:completionHandler:")
public native boolean executeForwardWithBatchSizeOptionsOutputsDataCompletionHandler(@NUInt long batchSize,
long options, NSDictionary<String, ? extends MLCTensorData> outputsData,
@ObjCBlock(name = "call_executeForwardWithBatchSizeOptionsOutputsDataCompletionHandler") Block_executeForwardWithBatchSizeOptionsOutputsDataCompletionHandler completionHandler);
@Runtime(ObjCRuntime.class)
@Generated
public interface Block_executeForwardWithBatchSizeOptionsOutputsDataCompletionHandler {
@Generated
void call_executeForwardWithBatchSizeOptionsOutputsDataCompletionHandler(MLCTensor resultTensor, NSError error,
double executionTime);
}
/**
* Execute the gradient pass of the training graph
*
* @param batchSize The batch size to use. For a graph where batch size changes between layers this value must be 0.
* @param options The execution options
* @param completionHandler The completion handler
* @return A boolean indicating success or failure
*/
@Generated
@Selector("executeGradientWithBatchSize:options:completionHandler:")
public native boolean executeGradientWithBatchSizeOptionsCompletionHandler(@NUInt long batchSize, long options,
@ObjCBlock(name = "call_executeGradientWithBatchSizeOptionsCompletionHandler") Block_executeGradientWithBatchSizeOptionsCompletionHandler completionHandler);
@Runtime(ObjCRuntime.class)
@Generated
public interface Block_executeGradientWithBatchSizeOptionsCompletionHandler {
@Generated
void call_executeGradientWithBatchSizeOptionsCompletionHandler(MLCTensor resultTensor, NSError error,
double executionTime);
}
/**
* Execute the gradient pass of the training graph
*
* @param batchSize The batch size to use. For a graph where batch size changes between layers this value must be 0.
* @param options The execution options
* @param outputsData The data objects to use for outputs
* @param completionHandler The completion handler
* @return A boolean indicating success or failure
*/
@Generated
@Selector("executeGradientWithBatchSize:options:outputsData:completionHandler:")
public native boolean executeGradientWithBatchSizeOptionsOutputsDataCompletionHandler(@NUInt long batchSize,
long options, NSDictionary<String, ? extends MLCTensorData> outputsData,
@ObjCBlock(name = "call_executeGradientWithBatchSizeOptionsOutputsDataCompletionHandler") Block_executeGradientWithBatchSizeOptionsOutputsDataCompletionHandler completionHandler);
@Runtime(ObjCRuntime.class)
@Generated
public interface Block_executeGradientWithBatchSizeOptionsOutputsDataCompletionHandler {
@Generated
void call_executeGradientWithBatchSizeOptionsOutputsDataCompletionHandler(MLCTensor resultTensor, NSError error,
double executionTime);
}
/**
* Execute the optimizer update pass of the training graph
*
* @param options The execution options
* @param completionHandler The completion handler
* @return A boolean indicating success or failure
*/
@Generated
@Selector("executeOptimizerUpdateWithOptions:completionHandler:")
public native boolean executeOptimizerUpdateWithOptionsCompletionHandler(long options,
@ObjCBlock(name = "call_executeOptimizerUpdateWithOptionsCompletionHandler") Block_executeOptimizerUpdateWithOptionsCompletionHandler completionHandler);
@Runtime(ObjCRuntime.class)
@Generated
public interface Block_executeOptimizerUpdateWithOptionsCompletionHandler {
@Generated
void call_executeOptimizerUpdateWithOptionsCompletionHandler(MLCTensor resultTensor, NSError error,
double executionTime);
}
/**
* Execute the training graph (forward, gradient and optimizer update) with given source and label data
* <p>
* Execute the training graph with given source and label data. If an optimizer is specified, the optimizer update is applied.
* If MLCExecutionOptionsSynchronous is specified in 'options', this method returns after the graph has been executed.
* Otherwise, this method returns after the graph has been queued for execution. The completion handler is called after the graph
* has finished execution.
*
* @param inputsData The data objects to use for inputs
* @param lossLabelsData The data objects to use for loss labels
* @param lossLabelWeightsData The data objects to use for loss label weights
* @param batchSize The batch size to use. For a graph where batch size changes between layers this value must be 0.
* @param options The execution options
* @param completionHandler The completion handler
* @return A boolean indicating success or failure
*/
@Generated
@Selector("executeWithInputsData:lossLabelsData:lossLabelWeightsData:batchSize:options:completionHandler:")
public native boolean executeWithInputsDataLossLabelsDataLossLabelWeightsDataBatchSizeOptionsCompletionHandler(
NSDictionary<String, ? extends MLCTensorData> inputsData,
NSDictionary<String, ? extends MLCTensorData> lossLabelsData,
NSDictionary<String, ? extends MLCTensorData> lossLabelWeightsData, @NUInt long batchSize, long options,
@ObjCBlock(name = "call_executeWithInputsDataLossLabelsDataLossLabelWeightsDataBatchSizeOptionsCompletionHandler") Block_executeWithInputsDataLossLabelsDataLossLabelWeightsDataBatchSizeOptionsCompletionHandler completionHandler);
@Runtime(ObjCRuntime.class)
@Generated
public interface Block_executeWithInputsDataLossLabelsDataLossLabelWeightsDataBatchSizeOptionsCompletionHandler {
@Generated
void call_executeWithInputsDataLossLabelsDataLossLabelWeightsDataBatchSizeOptionsCompletionHandler(
MLCTensor resultTensor, NSError error, double executionTime);
}
/**
* Execute the training graph (forward, gradient and optimizer update) with given source and label data
*
* @param inputsData The data objects to use for inputs
* @param lossLabelsData The data objects to use for loss labels
* @param lossLabelWeightsData The data objects to use for loss label weights
* @param outputsData The data objects to use for outputs
* @param batchSize The batch size to use. For a graph where batch size changes between layers this value must be 0.
* @param options The execution options
* @param completionHandler The completion handler
* @return A boolean indicating success or failure
*/
@Generated
@Selector("executeWithInputsData:lossLabelsData:lossLabelWeightsData:outputsData:batchSize:options:completionHandler:")
public native boolean executeWithInputsDataLossLabelsDataLossLabelWeightsDataOutputsDataBatchSizeOptionsCompletionHandler(
NSDictionary<String, ? extends MLCTensorData> inputsData,
NSDictionary<String, ? extends MLCTensorData> lossLabelsData,
NSDictionary<String, ? extends MLCTensorData> lossLabelWeightsData,
NSDictionary<String, ? extends MLCTensorData> outputsData, @NUInt long batchSize, long options,
@ObjCBlock(name = "call_executeWithInputsDataLossLabelsDataLossLabelWeightsDataOutputsDataBatchSizeOptionsCompletionHandler") Block_executeWithInputsDataLossLabelsDataLossLabelWeightsDataOutputsDataBatchSizeOptionsCompletionHandler completionHandler);
@Runtime(ObjCRuntime.class)
@Generated
public interface Block_executeWithInputsDataLossLabelsDataLossLabelWeightsDataOutputsDataBatchSizeOptionsCompletionHandler {
@Generated
void call_executeWithInputsDataLossLabelsDataLossLabelWeightsDataOutputsDataBatchSizeOptionsCompletionHandler(
MLCTensor resultTensor, NSError error, double executionTime);
}
/**
* Get the gradient data for a trainable parameter associated with a layer
* <p>
* This can be used to get the gradient data for weights or biases parameters associated with a convolution,
* fully connected or convolution transpose layer
*
* @param parameter The updatable parameter associated with the layer
* @param layer A layer in the training graph. Must be one of the following:
* - MLCConvolutionLayer
* - MLCFullyConnectedLayer
* - MLCBatchNormalizationLayer
* - MLCInstanceNormalizationLayer
* - MLCGroupNormalizationLayer
* - MLCLayerNormalizationLayer
* - MLCEmbeddingLayer
* - MLCMultiheadAttentionLayer
* @return The gradient data. Will return nil if the layer is marked as not trainable or if
* training graph is not executed with separate calls to forward and gradient passes.
*/
@Generated
@Selector("gradientDataForParameter:layer:")
public native NSData gradientDataForParameterLayer(MLCTensor parameter, MLCLayer layer);
/**
* Get the gradient tensor for an input tensor
*
* @param input The input tensor
* @return The gradient tensor
*/
@Generated
@Selector("gradientTensorForInput:")
public native MLCTensor gradientTensorForInput(MLCTensor input);
@Generated
@Selector("graph")
public static native MLCTrainingGraph graph();
/**
* Create a training graph
*
* @param graphObjects The layers from these graph objects will be added to the training graph
* @param lossLayer The loss layer to use. The loss layer can also be added to the training graph
* using nodeWithLayer:sources:lossLabels
* @param optimizer The optimizer to use
* @return A new training graph object
*/
@Generated
@Selector("graphWithGraphObjects:lossLayer:optimizer:")
public static native MLCTrainingGraph graphWithGraphObjectsLossLayerOptimizer(
NSArray<? extends MLCGraph> graphObjects, MLCLayer lossLayer, MLCOptimizer optimizer);
@Generated
@Selector("hash")
@NUInt
public static native long hash_static();
@Generated
@Selector("init")
public native MLCTrainingGraph init();
@Generated
@Selector("instanceMethodForSelector:")
@FunctionPtr(name = "call_instanceMethodForSelector_ret")
public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);
@Generated
@Selector("instanceMethodSignatureForSelector:")
public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);
@Generated
@Selector("instancesRespondToSelector:")
public static native boolean instancesRespondToSelector(SEL aSelector);
@Generated
@Selector("isSubclassOfClass:")
public static native boolean isSubclassOfClass(Class aClass);
@Generated
@Selector("keyPathsForValuesAffectingValueForKey:")
public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);
/**
* Link mutiple training graphs
* <p>
* This is used to link subsequent training graphs with first training sub-graph.
* This method should be used when we have tensors shared by one or more layers in multiple sub-graphs
*
* @param graphs The list of training graphs to link
* @return A boolean indicating success or failure
*/
@Generated
@Selector("linkWithGraphs:")
public native boolean linkWithGraphs(NSArray<? extends MLCTrainingGraph> graphs);
@Generated
@Owned
@Selector("new")
public static native MLCTrainingGraph new_objc();
/**
* [@property] optimizer
* <p>
* The optimizer to be used with the training graph
*/
@Generated
@Selector("optimizer")
public native MLCOptimizer optimizer();
@Generated
@Selector("resolveClassMethod:")
public static native boolean resolveClassMethod(SEL sel);
@Generated
@Selector("resolveInstanceMethod:")
public static native boolean resolveInstanceMethod(SEL sel);
/**
* Get the result gradient tensors for a layer in the training graph
*
* @param layer A layer in the training graph
* @return A list of tensors
*/
@Generated
@Selector("resultGradientTensorsForLayer:")
public native NSArray<? extends MLCTensor> resultGradientTensorsForLayer(MLCLayer layer);
/**
 * Set the input tensor parameters that also will be updated by the optimizer
 * <p>
 * These represent the list of input tensors to be updated when we execute the optimizer update
 * Weights, bias or beta, gamma tensors are not included in this list. MLCompute automatically
 * adds them to the parameter list based on whether the layer is marked as updatable or not.
 *
 * @param parameters The list of input tensors to be updated by the optimizer
 * @return A boolean indicating success or failure
 */
@Generated
@Selector("setTrainingTensorParameters:")
public native boolean setTrainingTensorParameters(NSArray<? extends MLCTensorParameter> parameters);
/** Binding for the NSObject selector {@code setVersion:}; sets the class version number used by the ObjC runtime. */
@Generated
@Selector("setVersion:")
public static native void setVersion_static(@NInt long aVersion);
/**
 * Get the source gradient tensors for a layer in the training graph
 *
 * @param layer A layer in the training graph
 * @return A list of tensors (source gradients consumed by {@code layer})
 */
@Generated
@Selector("sourceGradientTensorsForLayer:")
public native NSArray<? extends MLCTensor> sourceGradientTensorsForLayer(MLCLayer layer);
/**
 * Add the list of tensors whose contributions are not to be taken when computing gradients during gradient pass
 *
 * @param tensors The list of tensors to exclude from gradient computation
 * @return A boolean indicating success or failure
 */
@Generated
@Selector("stopGradientForTensors:")
public native boolean stopGradientForTensors(NSArray<? extends MLCTensor> tensors);
/** Binding for the NSObject selector {@code superclass}. */
@Generated
@Selector("superclass")
public static native Class superclass_static();
/**
 * Synchronize updates (weights/biases from convolution, fully connected and LSTM layers, tensor parameters)
 * from device memory to host memory.
 */
@Generated
@Selector("synchronizeUpdates")
public native void synchronizeUpdates();
/** Binding for the NSObject selector {@code version}; returns the class version number. */
@Generated
@Selector("version")
@NInt
public static native long version_static();
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) 2015-2021 decimal4j (tools4j), Marco Terzer
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.decimal4j.arithmetic;
import java.math.BigDecimal;
import java.math.RoundingMode;
import org.decimal4j.scale.ScaleMetrics;
import org.decimal4j.truncate.DecimalRounding;
import org.decimal4j.truncate.OverflowMode;
import org.decimal4j.truncate.UncheckedRounding;
/**
 * Decimal arithmetic for scales other than zero that applies an explicit
 * rounding mode to inexact results. Overflows are not detected; results are
 * silently truncated as per {@link OverflowMode#UNCHECKED UNCHECKED}.
 */
public final class UncheckedScaleNfRoundingArithmetic extends AbstractUncheckedScaleNfArithmetic {

	/** Rounding applied whenever a result cannot be represented exactly at this scale. */
	private final DecimalRounding rounding;

	/**
	 * Creates an arithmetic instance for the given scale with the specified
	 * rounding mode and {@link OverflowMode#UNCHECKED SILENT} overflow mode.
	 *
	 * @param scaleMetrics
	 *            the scale metrics for this decimal arithmetic
	 * @param roundingMode
	 *            the rounding mode to use for all decimal arithmetic
	 */
	public UncheckedScaleNfRoundingArithmetic(ScaleMetrics scaleMetrics, RoundingMode roundingMode) {
		this(scaleMetrics, DecimalRounding.valueOf(roundingMode));
	}

	/**
	 * Creates an arithmetic instance for the given scale with the specified
	 * rounding and {@link OverflowMode#UNCHECKED SILENT} overflow mode.
	 *
	 * @param scaleMetrics
	 *            the scale metrics for this decimal arithmetic
	 * @param rounding
	 *            the rounding to use for all decimal arithmetic
	 */
	public UncheckedScaleNfRoundingArithmetic(ScaleMetrics scaleMetrics, DecimalRounding rounding) {
		super(scaleMetrics);
		this.rounding = rounding;
	}

	@Override
	public final RoundingMode getRoundingMode() {
		return this.rounding.getRoundingMode();
	}

	@Override
	public final UncheckedRounding getTruncationPolicy() {
		final RoundingMode mode = getRoundingMode();
		return UncheckedRounding.valueOf(mode);
	}

	@Override
	public final long addUnscaled(long uDecimal, long unscaled, int scale) {
		final ScaleMetrics metrics = getScaleMetrics();
		return Add.addUnscaledUnscaled(metrics, this.rounding, uDecimal, unscaled, scale);
	}

	@Override
	public final long subtractUnscaled(long uDecimal, long unscaled, int scale) {
		final ScaleMetrics metrics = getScaleMetrics();
		return Sub.subtractUnscaledUnscaled(metrics, this.rounding, uDecimal, unscaled, scale);
	}

	@Override
	public final long avg(long uDecimal1, long uDecimal2) {
		return Avg.avg(this, this.rounding, uDecimal1, uDecimal2);
	}

	@Override
	public final long multiply(long uDecimal1, long uDecimal2) {
		return Mul.multiply(this, this.rounding, uDecimal1, uDecimal2);
	}

	@Override
	public final long multiplyByUnscaled(long uDecimal, long unscaled, int scale) {
		return Mul.multiplyByUnscaled(this.rounding, uDecimal, unscaled, scale);
	}

	@Override
	public final long square(long uDecimal) {
		final ScaleMetrics metrics = getScaleMetrics();
		return Square.square(metrics, this.rounding, uDecimal);
	}

	@Override
	public final long sqrt(long uDecimal) {
		return Sqrt.sqrt(this, this.rounding, uDecimal);
	}

	@Override
	public final long divideByLong(long uDecimalDividend, long lDivisor) {
		return Div.divideByLong(this.rounding, uDecimalDividend, lDivisor);
	}

	@Override
	public final long divideByUnscaled(long uDecimal, long unscaled, int scale) {
		return Div.divideByUnscaled(this.rounding, uDecimal, unscaled, scale);
	}

	@Override
	public final long divide(long uDecimalDividend, long uDecimalDivisor) {
		return Div.divide(this, this.rounding, uDecimalDividend, uDecimalDivisor);
	}

	@Override
	public final long invert(long uDecimal) {
		return Invert.invert(this, this.rounding, uDecimal);
	}

	@Override
	public final long pow(long uDecimal, int exponent) {
		return Pow.pow(this, this.rounding, uDecimal, exponent);
	}

	@Override
	public final long shiftLeft(long uDecimal, int positions) {
		return Shift.shiftLeft(this.rounding, uDecimal, positions);
	}

	@Override
	public final long shiftRight(long uDecimal, int positions) {
		return Shift.shiftRight(this.rounding, uDecimal, positions);
	}

	@Override
	public final long multiplyByPowerOf10(long uDecimal, int n) {
		return Pow10.multiplyByPowerOf10(this.rounding, uDecimal, n);
	}

	@Override
	public final long divideByPowerOf10(long uDecimal, int n) {
		return Pow10.divideByPowerOf10(this.rounding, uDecimal, n);
	}

	@Override
	public final long round(long uDecimal, int precision) {
		return Round.round(this, this.rounding, uDecimal, precision);
	}

	@Override
	public final long fromLong(long value) {
		// Exact widening into the unscaled representation; no rounding involved.
		final ScaleMetrics metrics = getScaleMetrics();
		return LongConversion.longToUnscaled(metrics, value);
	}

	@Override
	public final long fromUnscaled(long unscaledValue, int scale) {
		return UnscaledConversion.unscaledToUnscaled(this, this.rounding, unscaledValue, scale);
	}

	@Override
	public final long fromFloat(float value) {
		return FloatConversion.floatToUnscaled(this, this.rounding, value);
	}

	@Override
	public final long fromDouble(double value) {
		return DoubleConversion.doubleToUnscaled(this, this.rounding, value);
	}

	@Override
	public final long fromBigDecimal(BigDecimal value) {
		final ScaleMetrics metrics = getScaleMetrics();
		return BigDecimalConversion.bigDecimalToUnscaled(metrics, getRoundingMode(), value);
	}

	@Override
	public final long toLong(long uDecimal) {
		final ScaleMetrics metrics = getScaleMetrics();
		return LongConversion.unscaledToLong(metrics, this.rounding, uDecimal);
	}

	@Override
	public final long toUnscaled(long uDecimal, int scale) {
		return UnscaledConversion.unscaledToUnscaled(this.rounding, scale, this, uDecimal);
	}

	@Override
	public final float toFloat(long uDecimal) {
		return FloatConversion.unscaledToFloat(this, this.rounding, uDecimal);
	}

	@Override
	public final double toDouble(long uDecimal) {
		return DoubleConversion.unscaledToDouble(this, this.rounding, uDecimal);
	}

	@Override
	public final long parse(String value) {
		final int end = value.length();
		return StringConversion.parseUnscaledDecimal(this, this.rounding, value, 0, end);
	}

	@Override
	public final long parse(CharSequence value, int start, int end) {
		return StringConversion.parseUnscaledDecimal(this, this.rounding, value, start, end);
	}
}
| |
package com.calsoft.task.action;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.log4j.Logger;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.actions.DispatchAction;
import com.calsoft.factory.Factory;
import com.calsoft.task.form.TaskForm;
import com.calsoft.task.service.SaveTaskService;
import com.calsoft.task.service.factory.SaveTaskServiceFactory;
import com.calsoft.user.form.UserForm;
import com.calsoft.user.service.UserService;
import com.calsoft.util.TimeUtility;
public class TaskAction extends DispatchAction {
private static final Logger logger = Logger.getLogger("TaskAction");
UserService userService = null;
static int ID;
// Method for saving single entry.
// Saves a single time-entry row posted as a one-element JSON array ("taskJson").
// Writes the service result (or an error string) directly to the response.
public void saveAjax(ActionMapping mapping, ActionForm form,HttpServletRequest request, HttpServletResponse response)throws Exception {
    logger.info("You are inside saveAjax method");
    HttpSession httpSession = request.getSession();
    String payload = request.getParameter("taskJson");
    // Guard clause: no authenticated user id in the session.
    if (httpSession.getAttribute("user_id") == null) {
        response.getWriter().print("session expired");
        return;
    }
    int sessionUserId = Integer.parseInt(httpSession.getAttribute("user_id").toString());
    int jsonUserId = 0;
    try {
        TaskForm taskForm = (TaskForm) form;
        if (payload != null) {
            JSONArray rows = JSONArray.fromObject(payload);
            // Each iteration overwrites the form; effectively the last row wins.
            for (int idx = 0; idx < rows.size(); idx++) {
                JSONObject row = rows.getJSONObject(idx);
                taskForm.setStatus(row.get("status").toString());
                taskForm.setBacklog_id(row.get("backlog_id").toString());
                taskForm.setTask_id(row.get("task_id").toString());
                taskForm.setTask_description(row.get("task_description").toString());
                taskForm.setTask_date(row.get("date").toString());
                taskForm.setWork_status(row.get("work_status").toString());
                taskForm.setTime(row.get("time").toString());
                jsonUserId = Integer.parseInt(row.get("userId").toString());
            }
        }
        SaveTaskService saveTaskService = SaveTaskServiceFactory.getSaveTaskService();
        // A non-zero user id in the JSON (admin saving for a resource) takes
        // precedence over the session user.
        String[] msgArray = (jsonUserId != 0)
                ? saveTaskService.saveTask(taskForm, jsonUserId)
                : saveTaskService.saveTask(taskForm, sessionUserId);
        if (msgArray[0].equalsIgnoreCase("Invalid entry for Time")) {
            response.getWriter().print("Invalid time Entry");
        } else {
            response.getWriter().print(msgArray[1]);
        }
    } catch (Exception e) {
        // NOTE(review): every failure (bad JSON, service error) is reported to
        // the client as "session expired" — the message is misleading.
        e.printStackTrace();
        response.getWriter().print("session expired");
    }
}
// Method for saving all new entry together.
// Saves a batch of new time-entry rows submitted as a JSON array ("taskJson").
// Builds one TaskForm per JSON row and hands the whole list to the service.
public void saveAllAjax(ActionMapping mapping, ActionForm form,HttpServletRequest request, HttpServletResponse response)throws Exception {
    logger.info("You are inside saveAllAjax method.");
    HttpSession httpSession = request.getSession();
    String payload = request.getParameter("taskJson");
    // Guard clause: no authenticated user id in the session.
    if (httpSession.getAttribute("user_id") == null) {
        response.getWriter().print("session expired");
        return;
    }
    int sessionUserId = Integer.parseInt(httpSession.getAttribute("user_id").toString());
    int jsonUserId = 0;
    try {
        if (payload != null) {
            JSONArray rows = JSONArray.fromObject(payload);
            List<TaskForm> entries = new ArrayList<TaskForm>();
            SaveTaskService saveTaskService = SaveTaskServiceFactory.getSaveTaskService();
            for (int idx = 0; idx < rows.size(); idx++) {
                JSONObject row = rows.getJSONObject(idx);
                TaskForm entry = new TaskForm();
                entry.setStatus(row.get("status").toString());
                entry.setBacklog_id(row.get("backlog_id").toString());
                entry.setTask_id(row.get("task_id").toString());
                entry.setTask_description(row.get("task_description").toString());
                entry.setTask_date(row.get("date").toString());
                entry.setWork_status(row.get("work_status").toString());
                entry.setTime(row.get("time").toString());
                // Last row's userId wins; non-zero means an admin is saving
                // on behalf of another resource.
                jsonUserId = Integer.parseInt(row.get("userId").toString());
                entries.add(entry);
            }
            if (entries != null) {
                List<String> message = (jsonUserId != 0)
                        ? saveTaskService.saveAllTask(entries, jsonUserId)
                        : saveTaskService.saveAllTask(entries, sessionUserId);
                response.getWriter().print(message);
            }
        }
    } catch (Exception e) {
        // NOTE(review): any failure is reported as "session expired",
        // regardless of the actual cause.
        e.printStackTrace();
        response.getWriter().print("session expired");
    }
}
// Method for getting task detail month wise after month onChange.
// Renders the time-entry page for the month chosen in the month drop-down:
// loads that month's task rows, computes missing working dates for the
// reminder table, applies the timesheet-freeze check, and raises the
// previous-month alert notification when entries are incomplete.
public ActionForward getDetails(ActionMapping mapping, ActionForm form,HttpServletRequest request, HttpServletResponse response) throws Exception{
String forwardedPage = "";
HttpSession session = request.getSession();
// Only proceed for an authenticated session; otherwise fail hard below.
if(session.getAttribute("userName")!=null){
SaveTaskService saveTaskService = SaveTaskServiceFactory.getSaveTaskService();
int userId = Integer.parseInt(session.getAttribute("user_id").toString());
TaskForm taskForm = (TaskForm) form;
// "yyyy-MM" is the wire format of the month drop-down value.
DateFormat df = new SimpleDateFormat("yyyy-MM");
List<TaskForm> tList = null;
String month = taskForm.getMonth();
Calendar cal = Calendar.getInstance();
try {
tList = saveTaskService.getTaskDetails(month, userId);
request.setAttribute("tList", tList);
// Position cal on the first day of the selected month.
cal.setTime(df.parse(month));
cal.set(Calendar.DAY_OF_MONTH, cal.getActualMinimum(Calendar.DAY_OF_MONTH));
userService = Factory.getUserService();
try{
// Admin users additionally get the resource-selection drop-down.
int userManagerId = userService.getAdminUserId();
List<UserForm> listForm = null;
if(userManagerId==userId){
// Write logic
listForm = userService.getAllocatedResourcesBasedOnStartAndExitDate(userId, cal);
request.setAttribute("selectResourceBasedOnUserId", userManagerId);
request.setAttribute("userListSelection", listForm);
}
}
catch(Exception e){
// Admin lookup failure is non-fatal; the page just lacks the drop-down.
e.printStackTrace();
}
} catch(Exception e){
e.printStackTrace();
// NOTE(review): rethrowing a bare Exception drops the original cause.
throw new Exception();
}
// Added for missing entry reminder Table
UserForm userDetail = userService.getUsernameFromId(userId);
// Start date is a Date, exit date a "yyyy-MM-dd" string — inconsistent model.
Date resource_start_dt = userDetail.getStart_date();
String ressource_exit_dt_in_string = userDetail.getExit_date();
Calendar cal_for_resource_start_dt = null;
if(resource_start_dt != null){
cal_for_resource_start_dt = Calendar.getInstance();
cal_for_resource_start_dt.setTime(resource_start_dt);
}
Calendar cal_for_resource_end_dt = null;
if(ressource_exit_dt_in_string != null){
cal_for_resource_end_dt = Calendar.getInstance();
cal_for_resource_end_dt.setTime(new SimpleDateFormat("yyyy-MM-dd").parse(ressource_exit_dt_in_string));
}
// "Current" here means the month selected on the page, not today.
Calendar cal_for_current_dt = Calendar.getInstance();
if(month != null){
cal_for_current_dt.setTime(df.parse(month));
}
// Compute the month's working dates, clipped by the resource's start and/or
// exit month whenever the selected month coincides with either of them.
List<String> allmissingDateList = new ArrayList<String>();
if(cal_for_resource_start_dt == null && cal_for_resource_end_dt == null){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}else if(cal_for_resource_start_dt != null && cal_for_resource_end_dt == null){
if(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt)){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(cal_for_resource_start_dt, null, cal_for_current_dt);
}else if((cal_for_current_dt.get(Calendar.YEAR) > cal_for_resource_start_dt.get(Calendar.YEAR))
// NOTE(review): '&&' binds tighter than '||', so this reads as
// later-year OR (month-on-or-after AND same-year) — presumably intended.
|| cal_for_current_dt.get(Calendar.MONTH) >= cal_for_resource_start_dt.get(Calendar.MONTH) && cal_for_current_dt.get(Calendar.YEAR) == cal_for_resource_start_dt.get(Calendar.YEAR) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}
}else if(cal_for_resource_start_dt == null && cal_for_resource_end_dt != null){
if(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt)){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, cal_for_resource_end_dt, cal_for_current_dt);
}else if((cal_for_current_dt.get(Calendar.YEAR) < cal_for_resource_end_dt.get(Calendar.YEAR))
|| cal_for_current_dt.get(Calendar.MONTH) <= cal_for_resource_end_dt.get(Calendar.MONTH) && cal_for_current_dt.get(Calendar.YEAR) == cal_for_resource_end_dt.get(Calendar.YEAR) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}
}else if(cal_for_resource_start_dt != null && cal_for_resource_end_dt != null){
// Both bounds present: clip by whichever bound falls in the selected month.
if( (compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt)
&& compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt)) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(cal_for_resource_start_dt, cal_for_resource_end_dt, cal_for_current_dt);
}else if(!(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt))
&& (compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt))){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, cal_for_resource_end_dt, cal_for_current_dt);
}else if((compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt))
&& !(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt)) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(cal_for_resource_start_dt, null, cal_for_current_dt);
}else if( comapareCalendarMonthAndYearBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt, null)
&& comapareCalendarMonthAndYearBasedOnMonthAndYear(cal_for_current_dt, null, cal_for_resource_end_dt) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}
}
logger.info("Printing missing date list "+allmissingDateList);
request.setAttribute("allmissingDateList", allmissingDateList);
String locDetail = saveTaskService.getResourceLocation(userId);
// Freeze check: a frozen timesheet month (or later) blocks new entries.
Date freezeDate = userDetail.getFreeze_timesheet();
if(freezeDate != null){
Calendar calWithFreezingDate = Calendar.getInstance();
calWithFreezingDate.setTime(freezeDate);
cal.setTime(df.parse(month));
//Checking if freezing date exceeds than selectedMonth then blocking new entry.
if(calWithFreezingDate.after(cal) || calWithFreezingDate.equals(cal)){
forwardedPage = "timeEntryFreezedPage";
}
else{
forwardedPage = "getDetailSuccess";
}
}
else{
forwardedPage = "getDetailSuccess";
}
/* Update for Alert Box changes in time entry page */
//Checking previous month time entry for alert Notification on time-entry page
Calendar c1 = Calendar.getInstance();
c1.add(Calendar.MONTH, -1);
List<String> taskFormList = new ArrayList<String>();
List<String> previousMonthWorkingDates = new ArrayList<String>();
Calendar month_cal = Calendar.getInstance();
// Only raise the alert when the user is viewing the current month.
if(df.format(month_cal.getTime()).equals(month) ){
taskFormList = saveTaskService.getEnteredTimesheetDateForPreviousMonth(c1, userId);
if(cal_for_resource_start_dt == null && cal_for_resource_end_dt == null){
previousMonthWorkingDates = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateForPreviousMonthEntryNotification(null, null, c1);
}else if(cal_for_resource_start_dt != null && cal_for_resource_end_dt == null && !( compareCalendarEqualityBasedOnMonthAndYear(cal_for_resource_start_dt, Calendar.getInstance()) ) ){
previousMonthWorkingDates = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateForPreviousMonthEntryNotification(cal_for_resource_start_dt, null, c1);
}else if(cal_for_resource_start_dt == null && cal_for_resource_end_dt != null){
previousMonthWorkingDates = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateForPreviousMonthEntryNotification(null, cal_for_resource_end_dt, c1);
}else if(cal_for_resource_start_dt != null && cal_for_resource_end_dt != null && ( !compareCalendarEqualityBasedOnMonthAndYear(cal_for_resource_start_dt, Calendar.getInstance()) && comapareCalendarMonthAndYearBasedOnMonthAndYear(c1, null, cal_for_resource_end_dt)) ){
previousMonthWorkingDates = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateForPreviousMonthEntryNotification(cal_for_resource_start_dt, cal_for_resource_end_dt, c1);
}
// Alert when last month has working dates the user did not cover.
if(!previousMonthWorkingDates.isEmpty() && (taskFormList.isEmpty() || !taskFormList.containsAll(previousMonthWorkingDates))){
request.setAttribute("showAlertNotification", "showAlertNotification");
}
}
/*Update for alert box ends here.*/
request.setAttribute("resourceLocation", locDetail);
request.setAttribute("month", month);
return mapping.findForward(forwardedPage);
}
else
// NOTE(review): unauthenticated access throws a bare Exception with no message.
throw new Exception();
}
// Method for deleting entry.
// Deletes a single time-entry row identified by the "id" request parameter.
public void delete(ActionMapping mapping, ActionForm form,HttpServletRequest request, HttpServletResponse response) throws Exception{
    HttpSession httpSession = request.getSession();
    // Guard clause: not logged in.
    if (httpSession.getAttribute("userName") == null) {
        response.getWriter().print("session expired");
        return;
    }
    try {
        SaveTaskService saveTaskService = SaveTaskServiceFactory.getSaveTaskService();
        int rowId = Integer.parseInt(request.getParameter("id"));
        saveTaskService.doDelete(rowId);
    } catch (Exception e) {
        // NOTE(review): deletion failures are also reported as "session expired".
        e.printStackTrace();
        response.getWriter().print("session expired");
    }
}
// Method for saving entry after editing.
// Persists an edited time-entry row posted as a one-element JSON array
// ("taskJson"); the row's "id" identifies the record to update.
public void saveEditAjax(ActionMapping mapping, ActionForm form,HttpServletRequest request, HttpServletResponse response)throws Exception{
    HttpSession httpSession = request.getSession();
    // Guard clause: no authenticated user id in the session.
    if (httpSession.getAttribute("user_id") == null) {
        response.getWriter().print("session expired");
        return;
    }
    try {
        String payload = request.getParameter("taskJson");
        TaskForm taskForm = (TaskForm) form;
        SaveTaskService saveTaskService = SaveTaskServiceFactory.getSaveTaskService();
        int sessionUserId = Integer.parseInt(httpSession.getAttribute("user_id").toString());
        int recordId = 0;
        int jsonUserId = 0;
        if (payload != null) {
            JSONArray rows = JSONArray.fromObject(payload);
            // Each iteration overwrites the form; effectively the last row wins.
            for (int idx = 0; idx < rows.size(); idx++) {
                JSONObject row = rows.getJSONObject(idx);
                taskForm.setStatus(row.get("status").toString());
                taskForm.setBacklog_id(row.get("backlog_id").toString());
                taskForm.setTask_id(row.get("task_id").toString());
                taskForm.setTask_description(row.get("task_description").toString());
                taskForm.setTask_date(row.get("date").toString());
                taskForm.setWork_status(row.get("work_status").toString());
                taskForm.setTime(row.get("time").toString());
                taskForm.setId((Integer)row.get("id"));
                jsonUserId = Integer.parseInt(row.get("userId").toString());
                recordId = taskForm.getId();
            }
        }
        // Non-zero JSON user id (admin editing for a resource) takes precedence.
        String msg = (jsonUserId != 0)
                ? saveTaskService.editsaveTask(taskForm, recordId, jsonUserId)
                : saveTaskService.editsaveTask(taskForm, recordId, sessionUserId);
        if (msg.equalsIgnoreCase("Invalid entry for Time")) {
            response.getWriter().print("Invalid time Entry");
        } else if (msg.equalsIgnoreCase("Data edited sucessfully")) {
            // Note: "sucessfully" spelling is part of the service contract.
            response.getWriter().print("Data edited sucessfully");
        }
    } catch (Exception e) {
        // NOTE(review): any failure is reported as "session expired".
        e.printStackTrace();
        response.getWriter().print("session expired");
    }
}
// Method for getting time entry page.
// Renders the time-entry page for the current calendar month when the user
// first clicks the "Task" tab: clears stale session attributes, loads this
// month's rows, computes the missing-entry reminder list, applies the
// timesheet-freeze check and the previous-month alert notification.
public ActionForward onClickTask(ActionMapping mapping, ActionForm form,HttpServletRequest request, HttpServletResponse response)throws Exception {
String forwardedPage = "";
HttpSession s1 = request.getSession();
// Clear leftovers from other pages so they do not bleed into this view.
s1.removeAttribute("selectedDate");
s1.removeAttribute("conList");
s1.removeAttribute("userList");
s1.removeAttribute("conListUpdate");
if(s1.getAttribute("userName")!=null){
try{
SaveTaskService saveTaskService = SaveTaskServiceFactory.getSaveTaskService();
Calendar cal = Calendar.getInstance();
// "yyyy-MM" keys the service lookup to the current month.
DateFormat df = new SimpleDateFormat("yyyy-MM");
String year_month = df.format(cal.getTime());
List<TaskForm> tList = null;
int userId = Integer.parseInt(s1.getAttribute("user_id").toString());
tList = saveTaskService.getTaskDetails(year_month, userId);
String locDetail = saveTaskService.getResourceLocation(userId);
userService = Factory.getUserService();
try{
// Admin users additionally get the resource-selection drop-down.
int userManagerId = userService.getAdminUserId();
List<UserForm> listForm = null;
if(userManagerId==userId){
// Write logic here for resource wise time entry page.
listForm = userService.getAllocatedResourcesBasedOnStartAndExitDate(userId, cal);
request.setAttribute("selectResourceBasedOnUserId", userManagerId);
request.setAttribute("userListSelection", listForm);
}
}
catch(Exception e){
// Admin lookup failure is non-fatal; the page just lacks the drop-down.
e.printStackTrace();
}
// Added for missing entry reminder Table
UserForm userDetail = userService.getUsernameFromId(userId);
// Start date is a Date, exit date a "yyyy-MM-dd" string — inconsistent model.
Date resource_start_dt = userDetail.getStart_date();
String ressource_exit_dt_in_string = userDetail.getExit_date();
Calendar cal_for_resource_start_dt = null;
if(resource_start_dt != null){
cal_for_resource_start_dt = Calendar.getInstance();
cal_for_resource_start_dt.setTime(resource_start_dt);
}
Calendar cal_for_resource_end_dt = null;
if(ressource_exit_dt_in_string != null){
cal_for_resource_end_dt = Calendar.getInstance();
cal_for_resource_end_dt.setTime(new SimpleDateFormat("yyyy-MM-dd").parse(ressource_exit_dt_in_string));
}
// Here "current" really is today's month (unlike getDetails).
Calendar cal_for_current_dt = Calendar.getInstance();
// Compute this month's working dates, clipped by the resource's start
// and/or exit month when either coincides with the current month.
List<String> allmissingDateList = new ArrayList<String>();
if(cal_for_resource_start_dt == null && cal_for_resource_end_dt == null){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}else if(cal_for_resource_start_dt != null && cal_for_resource_end_dt == null){
if(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt)){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(cal_for_resource_start_dt, null, cal_for_current_dt);
}else if((cal_for_current_dt.get(Calendar.YEAR) > cal_for_resource_start_dt.get(Calendar.YEAR))
// NOTE(review): '&&' binds tighter than '||' — later-year OR
// (month-on-or-after AND same-year); presumably intended.
|| cal_for_current_dt.get(Calendar.MONTH) >= cal_for_resource_start_dt.get(Calendar.MONTH) && cal_for_current_dt.get(Calendar.YEAR) == cal_for_resource_start_dt.get(Calendar.YEAR) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}
}else if(cal_for_resource_start_dt == null && cal_for_resource_end_dt != null){
if(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt)){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, cal_for_resource_end_dt, cal_for_current_dt);
}else if((cal_for_current_dt.get(Calendar.YEAR) < cal_for_resource_end_dt.get(Calendar.YEAR))
|| cal_for_current_dt.get(Calendar.MONTH) <= cal_for_resource_end_dt.get(Calendar.MONTH) && cal_for_current_dt.get(Calendar.YEAR) == cal_for_resource_end_dt.get(Calendar.YEAR) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}
}else if(cal_for_resource_start_dt != null && cal_for_resource_end_dt != null){
// Both bounds present: clip by whichever bound falls in this month.
if( (compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt)
&& compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt)) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(cal_for_resource_start_dt, cal_for_resource_end_dt, cal_for_current_dt);
}else if(!(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt))
&& (compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt))){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, cal_for_resource_end_dt, cal_for_current_dt);
}else if((compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt))
&& !(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt)) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(cal_for_resource_start_dt, null, cal_for_current_dt);
}else if( comapareCalendarMonthAndYearBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt, null)
&& comapareCalendarMonthAndYearBasedOnMonthAndYear(cal_for_current_dt, null, cal_for_resource_end_dt) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}
}
request.setAttribute("allmissingDateList", allmissingDateList);
// Check for freezing status
Date freezeDate = userDetail.getFreeze_timesheet();
if(freezeDate != null){
Calendar calWithFreezingDate = Calendar.getInstance();
calWithFreezingDate.setTime(freezeDate);
// Normalise cal to midnight on the 1st so the comparison is month-based.
cal.set(Calendar.DAY_OF_MONTH, cal.getActualMinimum(Calendar.DAY_OF_MONTH));
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
//Checking if freezing date exceeds than currentDate then blocking new entry.
if(calWithFreezingDate.after(cal) || calWithFreezingDate.equals(cal)){
forwardedPage = "timeEntryFreezedPage";
}
else{
forwardedPage = "onClickTask";
}
}
else{
forwardedPage = "onClickTask";
}
// Checking previous month time entry for alert Notification on time-entry page
Calendar c1 = Calendar.getInstance();
c1.add(Calendar.MONTH, -1);
List<String> previousMonthWorkingDates = new ArrayList<String>();
List<String> taskFormList = saveTaskService.getEnteredTimesheetDateForPreviousMonth(c1, userId);
if(cal_for_resource_start_dt == null && cal_for_resource_end_dt == null){
previousMonthWorkingDates = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateForPreviousMonthEntryNotification(null, null, c1);
}else if(cal_for_resource_start_dt != null && cal_for_resource_end_dt == null && !( compareCalendarEqualityBasedOnMonthAndYear(cal_for_resource_start_dt, Calendar.getInstance()) ) ){
previousMonthWorkingDates = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateForPreviousMonthEntryNotification(cal_for_resource_start_dt, null, c1);
}else if(cal_for_resource_start_dt == null && cal_for_resource_end_dt != null){
previousMonthWorkingDates = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateForPreviousMonthEntryNotification(null, cal_for_resource_end_dt, c1);
}else if(cal_for_resource_start_dt != null && cal_for_resource_end_dt != null && ( !compareCalendarEqualityBasedOnMonthAndYear(cal_for_resource_start_dt, Calendar.getInstance()) && comapareCalendarMonthAndYearBasedOnMonthAndYear(c1, null, cal_for_resource_end_dt)) ){
previousMonthWorkingDates = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateForPreviousMonthEntryNotification(cal_for_resource_start_dt, cal_for_resource_end_dt, c1);
}
logger.info("Printing two list content from TaskAction onclick Method"+taskFormList+" list 2 \n"+previousMonthWorkingDates);
// Alert when last month has working dates the user did not cover.
if(!previousMonthWorkingDates.isEmpty() && (taskFormList.isEmpty() || !taskFormList.containsAll(previousMonthWorkingDates))){
request.setAttribute("showAlertNotification", "showAlertNotification");
}
// Updates ends here.
request.setAttribute("resourceLocation", locDetail);
request.setAttribute("tList", tList);
}
catch(Exception e){
e.printStackTrace();
// NOTE(review): rethrowing a bare Exception drops the original cause.
throw new Exception();
}
return mapping.findForward(forwardedPage);
}
else
// NOTE(review): unauthenticated access throws a bare Exception with no message.
throw new Exception();
}
// True when both calendars fall in the same month of the same year
// (day, time and other fields are ignored).
private boolean compareCalendarEqualityBasedOnMonthAndYear(Calendar c1, Calendar c2){
    final boolean sameMonth = c1.get(Calendar.MONTH) == c2.get(Calendar.MONTH);
    final boolean sameYear = c1.get(Calendar.YEAR) == c2.get(Calendar.YEAR);
    return sameMonth && sameYear;
}
// When startCal is given: true if currentCal's month/year is on or after it.
// Otherwise, when exitCal is given: true if currentCal's month/year is on or
// before it. Returns false when both bounds are null. (The startCal branch is
// evaluated exclusively when startCal is non-null, matching the original.)
private boolean comapareCalendarMonthAndYearBasedOnMonthAndYear(Calendar currentCal, Calendar startCal, Calendar exitCal){
    if (startCal != null) {
        final boolean laterYear = startCal.get(Calendar.YEAR) < currentCal.get(Calendar.YEAR);
        final boolean sameYearOnOrAfter =
                currentCal.get(Calendar.MONTH) >= startCal.get(Calendar.MONTH)
                && currentCal.get(Calendar.YEAR) == startCal.get(Calendar.YEAR);
        return laterYear || sameYearOnOrAfter;
    }
    if (exitCal != null) {
        final boolean earlierYear = exitCal.get(Calendar.YEAR) > currentCal.get(Calendar.YEAR);
        final boolean sameYearOnOrBefore =
                currentCal.get(Calendar.MONTH) <= exitCal.get(Calendar.MONTH)
                && currentCal.get(Calendar.YEAR) == exitCal.get(Calendar.YEAR);
        return earlierYear || sameYearOnOrBefore;
    }
    return false;
}
// Loads time-entry details for the resource an admin selected, rebuilds the
// missing-entry reminder list for the chosen month, applies the
// timesheet-freeze check, and forwards accordingly.
public ActionForward getDetailsBasedOnResourceName(ActionMapping mapping, ActionForm form,HttpServletRequest request, HttpServletResponse response)throws Exception{
String forwardedPage = "";
HttpSession s1 = request.getSession();
// Only authenticated sessions may view resource details.
if(s1.getAttribute("userName")!=null){
TaskForm taskForm = (TaskForm) form;
// Selected month in "yyyy-MM" form; parsed below with SimpleDateFormat.
String month = taskForm.getMonth();
int selectedUserId = taskForm.getUserId();
int userIdFromSession = Integer.parseInt(s1.getAttribute("user_id").toString());
List<TaskForm> tList = null;
SaveTaskService saveTaskService = SaveTaskServiceFactory.getSaveTaskService();
tList = saveTaskService.getTaskDetails(month, selectedUserId);
request.setAttribute("tList", tList);
Calendar cal = Calendar.getInstance();
DateFormat df = new SimpleDateFormat("yyyy-MM");
cal.setTime(df.parse(month));
// Normalise to the first day of the selected month.
cal.set(Calendar.DAY_OF_MONTH, cal.getActualMinimum(Calendar.DAY_OF_MONTH));
userService = Factory.getUserService();
try{
int userManagerId = userService.getAdminUserId();
List<UserForm> listForm = null;
// Only the admin user gets the resource-selection list populated.
if(userManagerId==userIdFromSession){
listForm = userService.getAllocatedResourcesBasedOnStartAndExitDate(userIdFromSession, cal);
request.setAttribute("selectResourceBasedOnUserId", selectedUserId);
request.setAttribute("userListSelection", listForm);
}
}
catch(Exception e){
// Best-effort: a failure building the drop-down must not block the page.
e.printStackTrace();
}
// Added for missing entry reminder Table
List<String> allmissingDateList = new ArrayList<String>();
if(selectedUserId != 0){
UserForm userDetail = userService.getUsernameFromId(selectedUserId);
Date resource_start_dt = userDetail.getStart_date();
// Exit date is stored as a "yyyy-MM-dd" string on the user form.
String ressource_exit_dt_in_string = userDetail.getExit_date();
Calendar cal_for_resource_start_dt = null;
if(resource_start_dt != null){
cal_for_resource_start_dt = Calendar.getInstance();
cal_for_resource_start_dt.setTime(resource_start_dt);
}
Calendar cal_for_resource_end_dt = null;
if(ressource_exit_dt_in_string != null){
cal_for_resource_end_dt = Calendar.getInstance();
cal_for_resource_end_dt.setTime(new SimpleDateFormat("yyyy-MM-dd").parse(ressource_exit_dt_in_string));
}
Calendar cal_for_current_dt = Calendar.getInstance();
cal_for_current_dt.setTime(df.parse(month));
// Clamp the working-date range to the resource's start/exit dates: a
// boundary calendar is passed through only when the selected month is the
// start or exit month itself; otherwise the full month is used.
if(cal_for_resource_start_dt == null && cal_for_resource_end_dt == null){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}else if(cal_for_resource_start_dt != null && cal_for_resource_end_dt == null){
if(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt)){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(cal_for_resource_start_dt, null, cal_for_current_dt);
}else if((cal_for_current_dt.get(Calendar.YEAR) > cal_for_resource_start_dt.get(Calendar.YEAR))
|| cal_for_current_dt.get(Calendar.MONTH) >= cal_for_resource_start_dt.get(Calendar.MONTH) && cal_for_current_dt.get(Calendar.YEAR) == cal_for_resource_start_dt.get(Calendar.YEAR) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}
}else if(cal_for_resource_start_dt == null && cal_for_resource_end_dt != null){
if(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt)){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, cal_for_resource_end_dt, cal_for_current_dt);
}else if((cal_for_current_dt.get(Calendar.YEAR) < cal_for_resource_end_dt.get(Calendar.YEAR))
|| cal_for_current_dt.get(Calendar.MONTH) <= cal_for_resource_end_dt.get(Calendar.MONTH) && cal_for_current_dt.get(Calendar.YEAR) == cal_for_resource_end_dt.get(Calendar.YEAR) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}
}else if(cal_for_resource_start_dt != null && cal_for_resource_end_dt != null){
if( (compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt)
&& compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt)) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(cal_for_resource_start_dt, cal_for_resource_end_dt, cal_for_current_dt);
}else if(!(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt))
&& (compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt))){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, cal_for_resource_end_dt, cal_for_current_dt);
}else if((compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt))
&& !(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt)) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(cal_for_resource_start_dt, null, cal_for_current_dt);
}else if( comapareCalendarMonthAndYearBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt, null)
&& comapareCalendarMonthAndYearBasedOnMonthAndYear(cal_for_current_dt, null, cal_for_resource_end_dt) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}
}
}
request.setAttribute("allmissingDateList", allmissingDateList);
// Update for freezing timesheet.
// NOTE(review): getUsernameFromId is also reached when selectedUserId == 0 —
// confirm the service tolerates an id of 0.
UserForm userDetail = userService.getUsernameFromId(selectedUserId);
Date freezeDate = userDetail.getFreeze_timesheet();
if(freezeDate != null){
Calendar calWithFreezingDate = Calendar.getInstance();
calWithFreezingDate.setTime(freezeDate);
cal.setTime(df.parse(month));
//Checking if freezing date exceeds than selectedMonth then blocking new entry.
if(calWithFreezingDate.after(cal) || calWithFreezingDate.equals(cal)){
forwardedPage = "timeEntryFreezedPage";
}
else{
forwardedPage = "getDetailSuccess";
}
}
else{
forwardedPage = "getDetailSuccess";
}
String locDetail = saveTaskService.getResourceLocation(selectedUserId);
request.setAttribute("resourceLocation", locDetail);
request.setAttribute("month", month);
return mapping.findForward(forwardedPage);
}
else
// No authenticated session: abort the action.
throw new Exception();
}
// Checking session before providing dynamic row to resources for adding entry.
// Writes a marker string to the response when the session has no logged-in
// user, so the client-side caller can detect the timeout.
public void checkSessionValidation(ActionMapping mapping, ActionForm form,HttpServletRequest request, HttpServletResponse response)throws Exception{
    logger.info("Printing from checkSessionValidation Method");
    HttpSession session = request.getSession();
    Object userName = session.getAttribute("userName");
    if(userName == null){
        response.getWriter().print("session expired");
    }
}
// Freezes the timesheet of the selected resource for the given month, then
// rebuilds the same view state (task list, resource drop-down, missing-entry
// reminder list, freeze check) before forwarding.
public ActionForward freezeTimesheet(ActionMapping map,ActionForm form,HttpServletRequest request,HttpServletResponse response)throws Exception{
// Freezing timesheet for selected resources.
logger.info("Printing from Task Action freezeTimesheet Method.");
String forwardedPage = "";
HttpSession s1 = request.getSession();
SaveTaskService saveTaskService = SaveTaskServiceFactory.getSaveTaskService();
// Only authenticated sessions may freeze a timesheet.
if(s1.getAttribute("userName")!=null){
try{
// Request parameters: "task_month" is "yyyy-MM", "resource_id" numeric.
String task_month_year = request.getParameter("task_month");
Integer resource_id = Integer.parseInt(request.getParameter("resource_id"));
DateFormat df = new SimpleDateFormat("yyyy-MM");
Calendar cal = Calendar.getInstance();
cal.setTime(df.parse(task_month_year));
// Normalise to the first day of the month before persisting the freeze.
cal.set(Calendar.DAY_OF_MONTH, cal.getActualMinimum(Calendar.DAY_OF_MONTH));
saveTaskService.freezeTimesheet(resource_id, cal);
int userIdFromSession = Integer.parseInt(s1.getAttribute("user_id").toString());
List<TaskForm> tList = null;
tList = saveTaskService.getTaskDetails(task_month_year, resource_id);
request.setAttribute("tList", tList);
userService = Factory.getUserService();
try{
List<UserForm> listForm = null;
listForm = userService.getAllocatedResourcesBasedOnStartAndExitDate(userIdFromSession, cal);
request.setAttribute("userListSelection", listForm);
request.setAttribute("selectResourceBasedOnUserId", resource_id);
}
catch(Exception e){
// Best-effort: a failure building the drop-down must not block the page.
e.printStackTrace();
}
// Added for missing entry reminder Table
List<String> allmissingDateList = new ArrayList<String>();
UserForm userDetail = userService.getUsernameFromId(resource_id);
Date resource_start_dt = userDetail.getStart_date();
// Exit date is stored as a "yyyy-MM-dd" string on the user form.
String ressource_exit_dt_in_string = userDetail.getExit_date();
Calendar cal_for_resource_start_dt = null;
if(resource_start_dt != null){
cal_for_resource_start_dt = Calendar.getInstance();
cal_for_resource_start_dt.setTime(resource_start_dt);
}
Calendar cal_for_resource_end_dt = null;
if(ressource_exit_dt_in_string != null){
cal_for_resource_end_dt = Calendar.getInstance();
cal_for_resource_end_dt.setTime(new SimpleDateFormat("yyyy-MM-dd").parse(ressource_exit_dt_in_string));
}
Calendar cal_for_current_dt = Calendar.getInstance();
cal_for_current_dt.setTime(df.parse(task_month_year));
// Clamp the working-date range to the resource's start/exit dates: a
// boundary calendar is passed through only when the selected month is the
// start or exit month itself; otherwise the full month is used.
if(cal_for_resource_start_dt == null && cal_for_resource_end_dt == null){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}else if(cal_for_resource_start_dt != null && cal_for_resource_end_dt == null){
if(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt)){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(cal_for_resource_start_dt, null, cal_for_current_dt);
}else if((cal_for_current_dt.get(Calendar.YEAR) > cal_for_resource_start_dt.get(Calendar.YEAR))
|| cal_for_current_dt.get(Calendar.MONTH) >= cal_for_resource_start_dt.get(Calendar.MONTH) && cal_for_current_dt.get(Calendar.YEAR) == cal_for_resource_start_dt.get(Calendar.YEAR) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}
}else if(cal_for_resource_start_dt == null && cal_for_resource_end_dt != null){
if(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt)){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, cal_for_resource_end_dt, cal_for_current_dt);
}else if((cal_for_current_dt.get(Calendar.YEAR) < cal_for_resource_end_dt.get(Calendar.YEAR))
|| cal_for_current_dt.get(Calendar.MONTH) <= cal_for_resource_end_dt.get(Calendar.MONTH) && cal_for_current_dt.get(Calendar.YEAR) == cal_for_resource_end_dt.get(Calendar.YEAR) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}
}else if(cal_for_resource_start_dt != null && cal_for_resource_end_dt != null){
if( (compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt)
&& compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt)) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(cal_for_resource_start_dt, cal_for_resource_end_dt, cal_for_current_dt);
}else if(!(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt))
&& (compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt))){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, cal_for_resource_end_dt, cal_for_current_dt);
}else if((compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt))
&& !(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt)) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(cal_for_resource_start_dt, null, cal_for_current_dt);
}else if( comapareCalendarMonthAndYearBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt, null)
&& comapareCalendarMonthAndYearBasedOnMonthAndYear(cal_for_current_dt, null, cal_for_resource_end_dt) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}
}
request.setAttribute("allmissingDateList", allmissingDateList);
// Update for freezing timesheet.
Date freezeDate = userDetail.getFreeze_timesheet();
if(freezeDate != null){
Calendar calWithFreezingDate = Calendar.getInstance();
calWithFreezingDate.setTime(freezeDate);
cal.setTime(df.parse(task_month_year));
//Checking if freezing date exceeds than selectedMonth then blocking new entry.
if(calWithFreezingDate.after(cal) || calWithFreezingDate.equals(cal)){
forwardedPage = "timeEntryFreezedPage";
}
else{
forwardedPage = "getDetailSuccess";
}
}
else{
forwardedPage = "getDetailSuccess";
}
String locDetail = saveTaskService.getResourceLocation(resource_id);
request.setAttribute("resourceLocation", locDetail);
request.setAttribute("month", task_month_year);
}
catch (Exception e) {
// NOTE(review): the original cause is logged but not propagated — the
// rethrown Exception carries no message or cause.
logger.error(e);
throw new Exception();
}
}
else{
// No authenticated session: abort the action.
throw new Exception();
}
return map.findForward(forwardedPage);
}
// Unfreezes the timesheet of the selected resource for the given month, then
// rebuilds the same view state (task list, resource drop-down, missing-entry
// reminder list, freeze check) before forwarding. Mirrors freezeTimesheet but
// with the freeze comparison inverted.
public ActionForward unfreezeTimesheet(ActionMapping map,ActionForm form,HttpServletRequest request,HttpServletResponse response)throws Exception{
// unfreezeTimesheet timesheet for selected resources.
logger.info("Printing from TaskAction unfreezeTimesheet Method.");
String forwardedPage = "";
HttpSession s1 = request.getSession();
SaveTaskService saveTaskService = SaveTaskServiceFactory.getSaveTaskService();
// Only authenticated sessions may unfreeze a timesheet.
if(s1.getAttribute("userName")!=null){
try{
// Request parameters: "task_month" is "yyyy-MM", "resource_id" numeric.
String task_month_year = request.getParameter("task_month");
Integer resource_id = Integer.parseInt(request.getParameter("resource_id"));
DateFormat df = new SimpleDateFormat("yyyy-MM");
Calendar cal = Calendar.getInstance();
cal.setTime(df.parse(task_month_year));
// NOTE(review): unlike freezeTimesheet, DAY_OF_MONTH is not normalised
// here before the service call — confirm this asymmetry is intended.
saveTaskService.unfreezeTimesheet(resource_id, cal);
Calendar cal2 = Calendar.getInstance();
cal2.setTime(df.parse(task_month_year));
int userIdFromSession = Integer.parseInt(s1.getAttribute("user_id").toString());
List<TaskForm> tList = null;
tList = saveTaskService.getTaskDetails(task_month_year, resource_id);
request.setAttribute("tList", tList);
userService = Factory.getUserService();
try{
List<UserForm> listForm = new ArrayList<UserForm>();
listForm = userService.getAllocatedResourcesBasedOnStartAndExitDate(userIdFromSession, cal2);
request.setAttribute("userListSelection", listForm);
request.setAttribute("selectResourceBasedOnUserId", resource_id);
}
catch(Exception e){
// Best-effort: a failure building the drop-down must not block the page.
e.printStackTrace();
}
// Added for missing entry reminder Table
List<String> allmissingDateList = new ArrayList<String>();
UserForm userDetail = userService.getUsernameFromId(resource_id);
Date resource_start_dt = userDetail.getStart_date();
// Exit date is stored as a "yyyy-MM-dd" string on the user form.
String ressource_exit_dt_in_string = userDetail.getExit_date();
Calendar cal_for_resource_start_dt = null;
if(resource_start_dt != null){
cal_for_resource_start_dt = Calendar.getInstance();
cal_for_resource_start_dt.setTime(resource_start_dt);
}
Calendar cal_for_resource_end_dt = null;
if(ressource_exit_dt_in_string != null){
cal_for_resource_end_dt = Calendar.getInstance();
cal_for_resource_end_dt.setTime(new SimpleDateFormat("yyyy-MM-dd").parse(ressource_exit_dt_in_string));
}
Calendar cal_for_current_dt = Calendar.getInstance();
cal_for_current_dt.setTime(df.parse(task_month_year));
// Clamp the working-date range to the resource's start/exit dates: a
// boundary calendar is passed through only when the selected month is the
// start or exit month itself; otherwise the full month is used.
if(cal_for_resource_start_dt == null && cal_for_resource_end_dt == null){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}else if(cal_for_resource_start_dt != null && cal_for_resource_end_dt == null){
if(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt)){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(cal_for_resource_start_dt, null, cal_for_current_dt);
}else if((cal_for_current_dt.get(Calendar.YEAR) > cal_for_resource_start_dt.get(Calendar.YEAR))
|| cal_for_current_dt.get(Calendar.MONTH) >= cal_for_resource_start_dt.get(Calendar.MONTH) && cal_for_current_dt.get(Calendar.YEAR) == cal_for_resource_start_dt.get(Calendar.YEAR) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}
}else if(cal_for_resource_start_dt == null && cal_for_resource_end_dt != null){
if(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt)){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, cal_for_resource_end_dt, cal_for_current_dt);
}else if((cal_for_current_dt.get(Calendar.YEAR) < cal_for_resource_end_dt.get(Calendar.YEAR))
|| cal_for_current_dt.get(Calendar.MONTH) <= cal_for_resource_end_dt.get(Calendar.MONTH) && cal_for_current_dt.get(Calendar.YEAR) == cal_for_resource_end_dt.get(Calendar.YEAR) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}
}else if(cal_for_resource_start_dt != null && cal_for_resource_end_dt != null){
if( (compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt)
&& compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt)) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(cal_for_resource_start_dt, cal_for_resource_end_dt, cal_for_current_dt);
}else if(!(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt))
&& (compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt))){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, cal_for_resource_end_dt, cal_for_current_dt);
}else if((compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt))
&& !(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt)) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(cal_for_resource_start_dt, null, cal_for_current_dt);
}else if( comapareCalendarMonthAndYearBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt, null)
&& comapareCalendarMonthAndYearBasedOnMonthAndYear(cal_for_current_dt, null, cal_for_resource_end_dt) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}
}
request.setAttribute("allmissingDateList", allmissingDateList);
// Update for unfreezing timesheet.
Date freezeDate = userDetail.getFreeze_timesheet();
if(freezeDate != null){
Calendar calWithFreezingDate = Calendar.getInstance();
calWithFreezingDate.setTime(freezeDate);
cal.setTime(df.parse(task_month_year));
//Checking if freezing date exceeds than selectedMonth then blocking new entry.
// Inverse of the freeze check: entry is allowed when the freeze boundary
// lies on or before the selected month.
if(calWithFreezingDate.before(cal) || calWithFreezingDate.equals(cal)){
forwardedPage = "getDetailSuccess";
}
else{
forwardedPage = "timeEntryFreezedPage";
}
}
else{
forwardedPage = "getDetailSuccess";
}
String locDetail = saveTaskService.getResourceLocation(resource_id);
request.setAttribute("resourceLocation", locDetail);
request.setAttribute("month", task_month_year);
}
catch (Exception e) {
// NOTE(review): the original cause is logged but not propagated — the
// rethrown Exception carries no message or cause.
logger.error(e);
throw new Exception();
}
}
else{
// No authenticated session: abort the action.
throw new Exception();
}
return map.findForward(forwardedPage);
}
// Builds the previous-month notification view for the logged-in user: loads
// that month's entries, the missing-entry reminder list, and applies the
// freeze check before forwarding.
public ActionForward getDetailsNotification(ActionMapping mapping, ActionForm form,HttpServletRequest request, HttpServletResponse response) throws Exception{
String forwardedPage = "";
HttpSession session = request.getSession();
// Only authenticated sessions may view the notification page.
if(session.getAttribute("userName")!=null){
SaveTaskService saveTaskService = SaveTaskServiceFactory.getSaveTaskService();
int userId = Integer.parseInt(session.getAttribute("user_id").toString());
TaskForm taskForm = (TaskForm) form;
DateFormat df = new SimpleDateFormat("yyyy-MM");
List<TaskForm> tList = null;
String month = taskForm.getMonth();
// Checking previous month time entry for alert Notification on time-entry page
// The form's month is deliberately overwritten with the previous calendar
// month ("yyyy-MM"); everything below operates on that month.
Calendar c1 = Calendar.getInstance();
c1.add(Calendar.MONTH, -1);
month = df.format(c1.getTime());
Calendar cal = Calendar.getInstance();
try {
tList = saveTaskService.getTaskDetails(month, userId);
request.setAttribute("tList", tList);
cal.setTime(df.parse(month));
// Normalise to the first day of the month.
cal.set(Calendar.DAY_OF_MONTH, cal.getActualMinimum(Calendar.DAY_OF_MONTH));
userService = Factory.getUserService();
try{
int userManagerId = userService.getAdminUserId();
List<UserForm> listForm = null;
// Only the admin user gets the resource-selection list populated.
if(userManagerId==userId){
// Write logic
listForm = userService.getAllocatedResourcesBasedOnStartAndExitDate(userId, cal);
request.setAttribute("selectResourceBasedOnUserId", userManagerId);
request.setAttribute("userListSelection", listForm);
}
}
catch(Exception e){
// Best-effort: a failure building the drop-down must not block the page.
e.printStackTrace();
}
} catch(Exception e){
// NOTE(review): the cause is printed but not propagated — the rethrown
// Exception carries no message or cause.
e.printStackTrace();
throw new Exception();
}
// Added for missing entry reminder Table
UserForm userDetail = userService.getUsernameFromId(userId);
Date resource_start_dt = userDetail.getStart_date();
// Exit date is stored as a "yyyy-MM-dd" string on the user form.
String ressource_exit_dt_in_string = userDetail.getExit_date();
Calendar cal_for_resource_start_dt = null;
if(resource_start_dt != null){
cal_for_resource_start_dt = Calendar.getInstance();
cal_for_resource_start_dt.setTime(resource_start_dt);
}
Calendar cal_for_resource_end_dt = null;
if(ressource_exit_dt_in_string != null){
cal_for_resource_end_dt = Calendar.getInstance();
cal_for_resource_end_dt.setTime(new SimpleDateFormat("yyyy-MM-dd").parse(ressource_exit_dt_in_string));
}
Calendar cal_for_current_dt = Calendar.getInstance();
if(month != null){
cal_for_current_dt.setTime(df.parse(month));
}
List<String> allmissingDateList = new ArrayList<String>();
// Clamp the working-date range to the resource's start/exit dates: a
// boundary calendar is passed through only when the selected month is the
// start or exit month itself; otherwise the full month is used.
if(cal_for_resource_start_dt == null && cal_for_resource_end_dt == null){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}else if(cal_for_resource_start_dt != null && cal_for_resource_end_dt == null){
if(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt)){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(cal_for_resource_start_dt, null, cal_for_current_dt);
}else if((cal_for_current_dt.get(Calendar.YEAR) > cal_for_resource_start_dt.get(Calendar.YEAR))
|| cal_for_current_dt.get(Calendar.MONTH) >= cal_for_resource_start_dt.get(Calendar.MONTH) && cal_for_current_dt.get(Calendar.YEAR) == cal_for_resource_start_dt.get(Calendar.YEAR) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}
}else if(cal_for_resource_start_dt == null && cal_for_resource_end_dt != null){
if(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt)){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, cal_for_resource_end_dt, cal_for_current_dt);
}else if((cal_for_current_dt.get(Calendar.YEAR) < cal_for_resource_end_dt.get(Calendar.YEAR))
|| cal_for_current_dt.get(Calendar.MONTH) <= cal_for_resource_end_dt.get(Calendar.MONTH) && cal_for_current_dt.get(Calendar.YEAR) == cal_for_resource_end_dt.get(Calendar.YEAR) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}
}else if(cal_for_resource_start_dt != null && cal_for_resource_end_dt != null){
if( (compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt)
&& compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt)) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(cal_for_resource_start_dt, cal_for_resource_end_dt, cal_for_current_dt);
}else if(!(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt))
&& (compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt))){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, cal_for_resource_end_dt, cal_for_current_dt);
}else if((compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt))
&& !(compareCalendarEqualityBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_end_dt)) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(cal_for_resource_start_dt, null, cal_for_current_dt);
}else if( comapareCalendarMonthAndYearBasedOnMonthAndYear(cal_for_current_dt, cal_for_resource_start_dt, null)
&& comapareCalendarMonthAndYearBasedOnMonthAndYear(cal_for_current_dt, null, cal_for_resource_end_dt) ){
allmissingDateList = TimeUtility.getAllWorkingDatesForSelectedMonthIncludingStartEndDateIn_dd_format(null, null, cal_for_current_dt);
}
}
request.setAttribute("allmissingDateList", allmissingDateList);
String locDetail = saveTaskService.getResourceLocation(userId);
Date freezeDate = userDetail.getFreeze_timesheet();
if(freezeDate != null){
Calendar calWithFreezingDate = Calendar.getInstance();
calWithFreezingDate.setTime(freezeDate);
cal.setTime(df.parse(month));
//Checking if freezing date exceeds than selectedMonth then blocking new entry.
if(calWithFreezingDate.after(cal) || calWithFreezingDate.equals(cal)){
forwardedPage = "timeEntryFreezedPage";
}
else{
forwardedPage = "getDetailSuccess";
}
}
else{
forwardedPage = "getDetailSuccess";
}
request.setAttribute("resourceLocation", locDetail);
request.setAttribute("month", month);
return mapping.findForward(forwardedPage);
}
else
// No authenticated session: abort the action.
throw new Exception();
}
}
| |
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.android.datatransport.cct;
import static android.content.pm.PackageManager.NameNotFoundException;
import static com.github.tomakehurst.wiremock.client.WireMock.aResponse;
import static com.github.tomakehurst.wiremock.client.WireMock.absent;
import static com.github.tomakehurst.wiremock.client.WireMock.equalTo;
import static com.github.tomakehurst.wiremock.client.WireMock.matchingJsonPath;
import static com.github.tomakehurst.wiremock.client.WireMock.notMatching;
import static com.github.tomakehurst.wiremock.client.WireMock.post;
import static com.github.tomakehurst.wiremock.client.WireMock.postRequestedFor;
import static com.github.tomakehurst.wiremock.client.WireMock.stubFor;
import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo;
import static com.github.tomakehurst.wiremock.client.WireMock.verify;
import static com.google.android.datatransport.cct.CctTransportBackend.getTzOffset;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertEquals;
import android.content.Context;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import androidx.test.core.app.ApplicationProvider;
import com.github.tomakehurst.wiremock.junit.WireMockRule;
import com.google.android.datatransport.Encoding;
import com.google.android.datatransport.backend.cct.BuildConfig;
import com.google.android.datatransport.cct.internal.ClientInfo;
import com.google.android.datatransport.cct.internal.NetworkConnectionInfo;
import com.google.android.datatransport.runtime.EncodedPayload;
import com.google.android.datatransport.runtime.EventInternal;
import com.google.android.datatransport.runtime.backends.BackendRequest;
import com.google.android.datatransport.runtime.backends.BackendResponse;
import com.google.android.datatransport.runtime.time.TestClock;
import com.google.protobuf.ByteString;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.Locale;
import java.util.zip.GZIPOutputStream;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
import org.robolectric.annotation.Implementation;
import org.robolectric.annotation.Implements;
@RunWith(RobolectricTestRunner.class)
public class CctTransportBackendTest {
// Fixed clock seeds so request/uptime timestamps are deterministic in asserts.
private static final long INITIAL_WALL_TIME = 200L;
private static final long INITIAL_UPTIME = 10L;
// Raw protobuf payload and its expected base64 form in the JSON log request.
private static final ByteString PAYLOAD =
ByteString.copyFrom("TelemetryData".getBytes(Charset.defaultCharset()));
private static final String PAYLOAD_BYTE64 = "VGVsZW1ldHJ5RGF0YQ==";
// JSON payload and the escaped form expected inside sourceExtensionJsonProto3.
private static final String JSON_PAYLOAD = "{\"hello\": false}";
private static final String JSON_PAYLOAD_ESCAPED = "{\\\"hello\\\": false}";
private static final int CODE = 5;
private static final String TEST_NAME = "hello";
private static final Encoding PROTOBUF_ENCODING = Encoding.of("proto");
private static final Encoding JSON_ENCODING = Encoding.of("json");
// Endpoint served by the WireMock rule below (same port, path "/api").
private static final String TEST_ENDPOINT = "http://localhost:8999/api";
private static final String API_KEY = "api_key";
// NOTE(review): both transport names map to "3" — confirm this is intended.
private static final String CCT_TRANSPORT_NAME = "3";
private static final String LEGACY_TRANSPORT_NAME = "3";
private TestClock wallClock = new TestClock(INITIAL_WALL_TIME);
private TestClock uptimeClock = new TestClock(INITIAL_UPTIME);
// Backend under test, wired to the Robolectric application context and the
// controllable test clocks above.
private CctTransportBackend BACKEND =
new CctTransportBackend(ApplicationProvider.getApplicationContext(), wallClock, uptimeClock);
@Rule public WireMockRule wireMockRule = new WireMockRule(8999);
/** Builds the default backend request: CCT transport name, test endpoint, no API key. */
private BackendRequest getCCTBackendRequest() {
  CCTDestination destination = new CCTDestination(TEST_ENDPOINT, null);
  return getCCTBackendRequest(CCT_TRANSPORT_NAME, destination);
}
/**
 * Builds a two-event backend request for the given transport name and
 * destination: one protobuf-encoded event without an event code, and one
 * JSON-encoded event carrying CODE. Both events are decorated by the backend
 * under test before being added.
 */
private BackendRequest getCCTBackendRequest(String transportName, CCTDestination destination) {
  // First event: protobuf payload, no event code.
  EventInternal protoEvent =
      BACKEND.decorate(
          EventInternal.builder()
              .setEventMillis(INITIAL_WALL_TIME)
              .setUptimeMillis(INITIAL_UPTIME)
              .setTransportName(transportName)
              .setEncodedPayload(new EncodedPayload(PROTOBUF_ENCODING, PAYLOAD.toByteArray()))
              .build());
  // Second event: JSON payload with an explicit event code.
  EventInternal jsonEvent =
      BACKEND.decorate(
          EventInternal.builder()
              .setEventMillis(INITIAL_WALL_TIME)
              .setUptimeMillis(INITIAL_UPTIME)
              .setTransportName(transportName)
              .setEncodedPayload(
                  new EncodedPayload(
                      JSON_ENCODING, JSON_PAYLOAD.getBytes(Charset.defaultCharset())))
              .setCode(CODE)
              .build());
  return BackendRequest.builder()
      .setEvents(Arrays.asList(protoEvent, jsonEvent))
      .setExtras(destination.getExtras())
      .build();
}
@Test
// Happy path: a stubbed 200 response with nextRequestWaitMillis=3 must yield
// BackendResponse.ok(3), and the outgoing request body must carry both events
// with the expected timestamps, payloads, timezone offset, and network info.
public void testCCTSuccessLoggingRequest() {
stubFor(
post(urlEqualTo("/api"))
.willReturn(
aResponse()
.withStatus(200)
.withHeader("Content-Type", "application/json;charset=UTF8;hello=world")
.withBody("{\"nextRequestWaitMillis\":3}")));
BackendRequest backendRequest = getCCTBackendRequest();
// Advance both clocks so requestTimeMs/requestUptimeMs differ from the
// per-event INITIAL_* values captured at build time.
wallClock.tick();
uptimeClock.tick();
BackendResponse response = BACKEND.send(backendRequest);
// Expected network fields come from Robolectric's active network info.
ConnectivityManager connectivityManager =
(ConnectivityManager)
ApplicationProvider.getApplicationContext()
.getSystemService(Context.CONNECTIVITY_SERVICE);
NetworkInfo activeNetworkInfo = connectivityManager.getActiveNetworkInfo();
verify(
postRequestedFor(urlEqualTo("/api"))
.withHeader(
"User-Agent",
equalTo(String.format("datatransport/%s android/", BuildConfig.VERSION_NAME)))
.withHeader("Content-Type", equalTo("application/json"))
.withRequestBody(matchingJsonPath("$[?(@.logRequest.size() == 1)]"))
.withRequestBody(
matchingJsonPath(
String.format(
"$[?(@.logRequest[0].requestTimeMs == %s)]", wallClock.getTime())))
.withRequestBody(
matchingJsonPath(
String.format(
"$[?(@.logRequest[0].requestUptimeMs == %s)]", uptimeClock.getTime())))
.withRequestBody(matchingJsonPath("$[?(@.logRequest[0].logEvent.size() == 2)]"))
.withRequestBody(
matchingJsonPath(
String.format(
"$[?(@.logRequest[0].logEvent[0].eventTimeMs == \"%s\")]",
INITIAL_WALL_TIME)))
.withRequestBody(
matchingJsonPath(
String.format(
"$[?(@.logRequest[0].logEvent[0].eventUptimeMs == \"%s\")]",
INITIAL_UPTIME)))
.withRequestBody(
matchingJsonPath(
String.format(
"$[?(@.logRequest[0].logEvent[0].sourceExtension == \"%s\")]",
PAYLOAD_BYTE64)))
.withRequestBody(
matchingJsonPath(
String.format(
"$[?(@.logRequest[0].logEvent[0].timezoneOffsetSeconds == \"%s\")]",
getTzOffset())))
.withRequestBody(
matchingJsonPath(
String.format(
"$[?(@.logRequest[0].logEvent[0].networkConnectionInfo.networkType == \"%s\")]",
NetworkConnectionInfo.NetworkType.forNumber(activeNetworkInfo.getType()))))
.withRequestBody(
matchingJsonPath(
String.format(
"$[?(@.logRequest[0].logEvent[0].networkConnectionInfo.mobileSubtype == \"%s\")]",
NetworkConnectionInfo.MobileSubtype.forNumber(
activeNetworkInfo.getSubtype()))))
// The protobuf event was built without a code; only the JSON event
// (index 1) must carry eventCode == CODE.
.withRequestBody(notMatching("$[?(@.logRequest[0].logEvent[0].eventCode)]"))
.withRequestBody(matchingJsonPath("$[?(@.logRequest[0].logEvent[1].eventCode == 5)]"))
.withRequestBody(
matchingJsonPath(
String.format(
"$[?(@.logRequest[0].logEvent[1].sourceExtensionJsonProto3 == \"%s\")]",
JSON_PAYLOAD_ESCAPED))));
assertEquals(BackendResponse.ok(3), response);
}
@Test
public void testCCTContainsRightAndroidClientInfo() {
  // Stub a successful CCT response so the upload under test completes normally.
  stubFor(
      post(urlEqualTo("/api"))
          .willReturn(
              aResponse()
                  .withStatus(200)
                  .withHeader("Content-Type", "application/json;charset=UTF8;hello=world")
                  .withBody("{\"nextRequestWaitMillis\":3}")));
  BackendRequest backendRequest = getCCTBackendRequest();
  wallClock.tick();
  uptimeClock.tick();
  BackendResponse response = BACKEND.send(backendRequest);
  // The uploaded body must carry client info derived from the default device locale.
  verify(
      postRequestedFor(urlEqualTo("/api"))
          .withHeader(
              "User-Agent",
              equalTo(String.format("datatransport/%s android/", BuildConfig.VERSION_NAME)))
          .withHeader("Content-Type", equalTo("application/json"))
          .withRequestBody(matchingJsonPath("$[?(@.logRequest.size() == 1)]"))
          .withRequestBody(
              matchingJsonPath(
                  String.format(
                      "$[?(@.logRequest[0].clientInfo.clientType == \"%s\")]",
                      ClientInfo.ClientType.ANDROID_FIREBASE)))
          .withRequestBody(
              matchingJsonPath(
                  String.format(
                      "$[?(@.logRequest[0].clientInfo.androidClientInfo.locale == \"%s\")]",
                      Locale.getDefault().getLanguage())))
          .withRequestBody(
              matchingJsonPath(
                  String.format(
                      "$[?(@.logRequest[0].clientInfo.androidClientInfo.country == \"%s\")]",
                      Locale.getDefault().getCountry())))
          // MCC/MNC is empty in roboelectric. The path is a constant, so the
          // original zero-argument String.format wrapper was removed.
          .withRequestBody(
              matchingJsonPath(
                  "$[?(@.logRequest[0].clientInfo.androidClientInfo.mccMnc == \"\")]")));
  assertEquals(BackendResponse.ok(3), response);
}
@Test
public void testCCTContainsRightApplicationBuild() throws NameNotFoundException {
  // Arrange: endpoint accepts the upload with a standard success body.
  stubFor(
      post(urlEqualTo("/api"))
          .willReturn(
              aResponse()
                  .withStatus(200)
                  .withHeader("Content-Type", "application/json;charset=UTF8;hello=world")
                  .withBody("{\"nextRequestWaitMillis\":3}")));
  BackendRequest request = getCCTBackendRequest();
  wallClock.tick();
  uptimeClock.tick();
  BackendResponse response = BACKEND.send(request);
  // Resolve the version code the backend is expected to report for this package.
  int versionCode =
      ApplicationProvider.getApplicationContext()
          .getPackageManager()
          .getPackageInfo(
              ApplicationProvider.getApplicationContext().getPackageName(), /* flags= */ 0)
          .versionCode;
  verify(
      postRequestedFor(urlEqualTo("/api"))
          .withHeader(
              "User-Agent",
              equalTo(String.format("datatransport/%s android/", BuildConfig.VERSION_NAME)))
          .withHeader("Content-Type", equalTo("application/json"))
          .withRequestBody(
              matchingJsonPath(
                  String.format(
                      "$[?(@.logRequest[0].clientInfo.androidClientInfo.applicationBuild == \"%s\")]",
                      versionCode))));
  assertEquals(BackendResponse.ok(3), response);
}
@Test
public void testLegacyFlgSuccessLoggingRequest_containsAPIKey() {
  stubFor(
      post(urlEqualTo("/api"))
          .willReturn(
              aResponse()
                  .withStatus(200)
                  .withHeader("Content-Type", "application/json;charset=UTF8;hello=world")
                  .withBody("{\"nextRequestWaitMillis\":3}")));
  wallClock.tick();
  uptimeClock.tick();
  // A legacy FLG destination carrying an API key must surface it as a header.
  CCTDestination destination = new CCTDestination(TEST_ENDPOINT, API_KEY);
  BACKEND.send(getCCTBackendRequest(LEGACY_TRANSPORT_NAME, destination));
  verify(
      postRequestedFor(urlEqualTo("/api"))
          .withHeader(CctTransportBackend.API_KEY_HEADER_KEY, equalTo(API_KEY)));
}
@Test
public void testLegacyFlgSuccessLoggingRequest_containUrl() {
  final String host = "http://localhost:8999";
  stubFor(
      post(urlEqualTo("/custom_api"))
          .willReturn(
              aResponse()
                  .withStatus(200)
                  .withHeader("Content-Type", "application/json;charset=UTF8;hello=world")
                  .withBody("{\"nextRequestWaitMillis\":3}")));
  wallClock.tick();
  uptimeClock.tick();
  // A destination with a custom URL and no API key must hit that URL and omit the header.
  CCTDestination destination = new CCTDestination(host + "/custom_api", null);
  BACKEND.send(getCCTBackendRequest(LEGACY_TRANSPORT_NAME, destination));
  verify(
      postRequestedFor(urlEqualTo("/custom_api"))
          .withHeader(CctTransportBackend.API_KEY_HEADER_KEY, absent()));
}
@Test
public void testLegacyFlgSuccessLoggingRequest_containsAPIKeyAndUrl() {
  final String host = "http://localhost:8999";
  stubFor(
      post(urlEqualTo("/custom_api"))
          .willReturn(
              aResponse()
                  .withStatus(200)
                  .withHeader("Content-Type", "application/json;charset=UTF8;hello=world")
                  .withBody("{\"nextRequestWaitMillis\":3}")));
  wallClock.tick();
  uptimeClock.tick();
  // Both a custom URL and an API key: the custom URL is used AND the key header is sent.
  CCTDestination destination = new CCTDestination(host + "/custom_api", API_KEY);
  BACKEND.send(getCCTBackendRequest(LEGACY_TRANSPORT_NAME, destination));
  verify(
      postRequestedFor(urlEqualTo("/custom_api"))
          .withHeader(CctTransportBackend.API_KEY_HEADER_KEY, equalTo(API_KEY)));
}
@Test
public void testLegacyFlgSuccessLoggingRequest_corruptedExtras() {
  // Build a request whose extras blob is not a valid CCTDestination encoding.
  // StandardCharsets.UTF_8 replaces getBytes("UTF-8"), eliminating the checked
  // UnsupportedEncodingException (and the throws clause that existed only for it).
  BackendRequest request =
      BackendRequest.builder()
          .setEvents(
              Arrays.asList(
                  BACKEND.decorate(
                      EventInternal.builder()
                          .setEventMillis(INITIAL_WALL_TIME)
                          .setUptimeMillis(INITIAL_UPTIME)
                          .setTransportName("4")
                          .setEncodedPayload(
                              new EncodedPayload(PROTOBUF_ENCODING, PAYLOAD.toByteArray()))
                          .build()),
                  BACKEND.decorate(
                      EventInternal.builder()
                          .setEventMillis(INITIAL_WALL_TIME)
                          .setUptimeMillis(INITIAL_UPTIME)
                          .setTransportName("4")
                          .setEncodedPayload(
                              new EncodedPayload(PROTOBUF_ENCODING, PAYLOAD.toByteArray()))
                          .setCode(CODE)
                          .build())))
          .setExtras("not a valid extras".getBytes(java.nio.charset.StandardCharsets.UTF_8))
          .build();
  stubFor(
      post(urlEqualTo("/api"))
          .willReturn(
              aResponse()
                  .withStatus(200)
                  .withHeader("Content-Type", "application/json;charset=UTF8;hello=world")
                  .withBody("{\"nextRequestWaitMillis\":3}")));
  wallClock.tick();
  uptimeClock.tick();
  BackendResponse response = BACKEND.send(request);
  // Corrupted extras can never succeed on retry, so the backend must report FATAL_ERROR.
  assertThat(response.getStatus()).isEqualTo(BackendResponse.Status.FATAL_ERROR);
}
@Test
public void testUnsuccessfulLoggingRequest() {
  // A 404 from the server is reported as transient, i.e. the caller may retry later.
  stubFor(post(urlEqualTo("/api")).willReturn(aResponse().withStatus(404)));
  BackendResponse result = BACKEND.send(getCCTBackendRequest());
  assertEquals(BackendResponse.transientError(), result);
  verify(
      postRequestedFor(urlEqualTo("/api"))
          .withHeader("Content-Type", equalTo("application/json")));
}
@Test
public void testServerErrorLoggingRequest() {
  // A 500 is likewise treated as a transient (retryable) failure.
  stubFor(post(urlEqualTo("/api")).willReturn(aResponse().withStatus(500)));
  BackendResponse result = BACKEND.send(getCCTBackendRequest());
  assertEquals(BackendResponse.transientError(), result);
  verify(
      postRequestedFor(urlEqualTo("/api"))
          .withHeader("Content-Type", equalTo("application/json")));
}
@Test
public void testGarbageFromServer() {
  // HTTP 200 but a body that is not the expected LogResponse JSON: parsing fails,
  // which the backend maps to a transient error.
  stubFor(
      post(urlEqualTo("/api"))
          .willReturn(
              aResponse()
                  .withStatus(200)
                  .withHeader("Content-Type", "application/json;charset=UTF8;hello=world")
                  .withBody("{\"status\":\"Error\",\"message\":\"Endpoint not found\"}")));
  BackendResponse result = BACKEND.send(getCCTBackendRequest());
  assertEquals(BackendResponse.transientError(), result);
  verify(
      postRequestedFor(urlEqualTo("/api"))
          .withHeader("Content-Type", equalTo("application/json")));
}
@Test
public void testNonHandledResponseCode() {
  // Status codes outside the handled set (here 300) are not retryable: fatal error.
  stubFor(post(urlEqualTo("/api")).willReturn(aResponse().withStatus(300)));
  BackendResponse result = BACKEND.send(getCCTBackendRequest());
  assertEquals(BackendResponse.fatalError(), result);
  verify(
      postRequestedFor(urlEqualTo("/api"))
          .withHeader("Content-Type", equalTo("application/json")));
}
@Test
public void send_whenBackendResponseTimesOut_shouldReturnTransientError() {
  // Backend configured with a 300ms read timeout; the server delays 500ms.
  CctTransportBackend timingOutBackend =
      new CctTransportBackend(
          ApplicationProvider.getApplicationContext(), wallClock, uptimeClock, 300);
  stubFor(post(urlEqualTo("/api")).willReturn(aResponse().withFixedDelay(500)));
  BackendResponse result = timingOutBackend.send(getCCTBackendRequest());
  assertEquals(BackendResponse.transientError(), result);
}
@Test
public void decorate_whenOnline_shouldProperlyPopulateNetworkInfo() {
  CctTransportBackend backend =
      new CctTransportBackend(
          ApplicationProvider.getApplicationContext(), wallClock, uptimeClock, 300);
  EventInternal event =
      EventInternal.builder()
          .setEventMillis(INITIAL_WALL_TIME)
          .setUptimeMillis(INITIAL_UPTIME)
          .setTransportName("3")
          .setEncodedPayload(new EncodedPayload(PROTOBUF_ENCODING, PAYLOAD.toByteArray()))
          .build();
  EventInternal result = backend.decorate(event);
  // Robolectric's default ConnectivityManager reports MOBILE/EDGE connectivity,
  // and decorate() must stamp those values onto the event.
  assertThat(result.get(CctTransportBackend.KEY_NETWORK_TYPE))
      .isEqualTo(String.valueOf(NetworkConnectionInfo.NetworkType.MOBILE.getValue()));
  assertThat(result.get(CctTransportBackend.KEY_MOBILE_SUBTYPE))
      .isEqualTo(String.valueOf(NetworkConnectionInfo.MobileSubtype.EDGE.getValue()));
}
@Test
@Config(shadows = {OfflineConnectivityManagerShadow.class})
public void decorate_whenOffline_shouldProperlyPopulateNetworkInfo() {
  CctTransportBackend backend =
      new CctTransportBackend(
          ApplicationProvider.getApplicationContext(), wallClock, uptimeClock, 300);
  EventInternal event =
      EventInternal.builder()
          .setEventMillis(INITIAL_WALL_TIME)
          .setUptimeMillis(INITIAL_UPTIME)
          .setTransportName("3")
          .setEncodedPayload(new EncodedPayload(PROTOBUF_ENCODING, PAYLOAD.toByteArray()))
          .build();
  EventInternal result = backend.decorate(event);
  // With the shadow returning no active network, decorate() must fall back to
  // NONE / UNKNOWN_MOBILE_SUBTYPE rather than crashing on the null NetworkInfo.
  assertThat(result.get(CctTransportBackend.KEY_NETWORK_TYPE))
      .isEqualTo(String.valueOf(NetworkConnectionInfo.NetworkType.NONE.getValue()));
  assertThat(result.get(CctTransportBackend.KEY_MOBILE_SUBTYPE))
      .isEqualTo(
          String.valueOf(NetworkConnectionInfo.MobileSubtype.UNKNOWN_MOBILE_SUBTYPE.getValue()));
}
@Test
public void send_whenBackendRedirects_shouldCorrectlyFollowTheRedirectViaPost() {
  // First hop: 302 redirect pointing at /api/hello.
  stubFor(
      post(urlEqualTo("/api"))
          .willReturn(
              aResponse().withStatus(302).withHeader("Location", TEST_ENDPOINT + "/hello")));
  // Second hop: the redirect target answers successfully.
  stubFor(
      post(urlEqualTo("/api/hello"))
          .willReturn(
              aResponse()
                  .withStatus(200)
                  .withHeader("Content-Type", "application/json;charset=UTF8;hello=world")
                  .withBody("{\"nextRequestWaitMillis\":3}")));
  BackendRequest request = getCCTBackendRequest();
  wallClock.tick();
  uptimeClock.tick();
  BackendResponse result = BACKEND.send(request);
  // Both the original URL and the redirect target must have received a POST.
  verify(
      postRequestedFor(urlEqualTo("/api"))
          .withHeader("Content-Type", equalTo("application/json")));
  verify(
      postRequestedFor(urlEqualTo("/api/hello"))
          .withHeader("Content-Type", equalTo("application/json")));
  assertEquals(BackendResponse.ok(3), result);
}
@Test
public void send_whenBackendRedirectswith307_shouldCorrectlyFollowTheRedirectViaPost() {
  // Same scenario as the 302 test, but with a 307 (method-preserving) redirect.
  stubFor(
      post(urlEqualTo("/api"))
          .willReturn(
              aResponse().withStatus(307).withHeader("Location", TEST_ENDPOINT + "/hello")));
  stubFor(
      post(urlEqualTo("/api/hello"))
          .willReturn(
              aResponse()
                  .withStatus(200)
                  .withHeader("Content-Type", "application/json;charset=UTF8;hello=world")
                  .withBody("{\"nextRequestWaitMillis\":3}")));
  BackendRequest request = getCCTBackendRequest();
  wallClock.tick();
  uptimeClock.tick();
  BackendResponse result = BACKEND.send(request);
  // The POST must be replayed against the redirect target, not downgraded to GET.
  verify(
      postRequestedFor(urlEqualTo("/api"))
          .withHeader("Content-Type", equalTo("application/json")));
  verify(
      postRequestedFor(urlEqualTo("/api/hello"))
          .withHeader("Content-Type", equalTo("application/json")));
  assertEquals(BackendResponse.ok(3), result);
}
@Test
public void send_whenBackendRedirectsMoreThan5Times_shouldOnlyRedirect4Times() {
  // /api redirects to /api/hello, which redirects to itself forever.
  stubFor(
      post(urlEqualTo("/api"))
          .willReturn(
              aResponse().withStatus(302).withHeader("Location", TEST_ENDPOINT + "/hello")));
  stubFor(
      post(urlEqualTo("/api/hello"))
          .willReturn(
              aResponse().withStatus(302).withHeader("Location", TEST_ENDPOINT + "/hello")));
  BackendRequest request = getCCTBackendRequest();
  wallClock.tick();
  uptimeClock.tick();
  BackendResponse result = BACKEND.send(request);
  // The redirect loop is cut off: one hit on /api, exactly 4 on /api/hello,
  // and the overall send is reported as a fatal error.
  verify(
      postRequestedFor(urlEqualTo("/api"))
          .withHeader("Content-Type", equalTo("application/json")));
  verify(
      4,
      postRequestedFor(urlEqualTo("/api/hello"))
          .withHeader("Content-Type", equalTo("application/json")));
  assertEquals(BackendResponse.fatalError(), result);
}
@Test
public void send_CompressedResponseIsUncompressed() throws IOException {
  // Pre-compress the response body. try-with-resources guarantees the gzip stream
  // is closed (flushing its trailer into `output`) even if write() throws; the
  // original leaked the stream on a write failure.
  ByteArrayOutputStream output = new ByteArrayOutputStream();
  try (GZIPOutputStream gzipOutputStream = new GZIPOutputStream(output)) {
    gzipOutputStream.write(
        "{\"nextRequestWaitMillis\":3}".getBytes(java.nio.charset.StandardCharsets.UTF_8));
  }
  stubFor(
      post(urlEqualTo("/api"))
          .willReturn(
              aResponse()
                  .withStatus(200)
                  .withHeader("Content-Type", "application/json;charset=UTF8;hello=world")
                  .withHeader("Content-Encoding", "gzip")
                  .withBody(output.toByteArray())));
  BackendRequest backendRequest = getCCTBackendRequest();
  wallClock.tick();
  uptimeClock.tick();
  BackendResponse response = BACKEND.send(backendRequest);
  verify(
      postRequestedFor(urlEqualTo("/api"))
          .withHeader("Content-Type", equalTo("application/json"))
          .withHeader("Content-Encoding", equalTo("gzip")));
  // The gzipped body must have been transparently decompressed and parsed.
  assertEquals(BackendResponse.ok(3), response);
}
@Test
public void send_whenLogSourceIsSetByName_shouldSetItToProperField() throws IOException {
  // try-with-resources closes the gzip stream even if write() throws (the original
  // leaked it on failure) and guarantees the trailer is flushed before stubbing.
  ByteArrayOutputStream output = new ByteArrayOutputStream();
  try (GZIPOutputStream gzipOutputStream = new GZIPOutputStream(output)) {
    gzipOutputStream.write(
        "{\"nextRequestWaitMillis\":3}".getBytes(java.nio.charset.StandardCharsets.UTF_8));
  }
  stubFor(
      post(urlEqualTo("/api"))
          .willReturn(
              aResponse()
                  .withStatus(200)
                  .withHeader("Content-Type", "application/json;charset=UTF8;hello=world")
                  .withHeader("Content-Encoding", "gzip")
                  .withBody(output.toByteArray())));
  // Two events: one with a numeric transport name ("3"), one with a symbolic name.
  BackendRequest backendRequest =
      BackendRequest.builder()
          .setEvents(
              Arrays.asList(
                  BACKEND.decorate(
                      EventInternal.builder()
                          .setEventMillis(INITIAL_WALL_TIME)
                          .setUptimeMillis(INITIAL_UPTIME)
                          .setTransportName("3")
                          .setEncodedPayload(
                              new EncodedPayload(PROTOBUF_ENCODING, PAYLOAD.toByteArray()))
                          .build()),
                  BACKEND.decorate(
                      EventInternal.builder()
                          .setEventMillis(INITIAL_WALL_TIME)
                          .setUptimeMillis(INITIAL_UPTIME)
                          .setTransportName(TEST_NAME)
                          .setEncodedPayload(
                              new EncodedPayload(PROTOBUF_ENCODING, PAYLOAD.toByteArray()))
                          .setCode(CODE)
                          .build())))
          .setExtras(new CCTDestination(TEST_ENDPOINT, null).getExtras())
          .build();
  wallClock.tick();
  uptimeClock.tick();
  BackendResponse response = BACKEND.send(backendRequest);
  // Numeric names land in logSource; non-numeric names land in logSourceName.
  verify(
      postRequestedFor(urlEqualTo("/api"))
          .withHeader("Content-Type", equalTo("application/json"))
          .withHeader("Content-Encoding", equalTo("gzip"))
          .withRequestBody(matchingJsonPath("$[?(@.logRequest.size() == 2)]"))
          .withRequestBody(matchingJsonPath("$[?(@.logRequest[0].logSource == 3)]"))
          .withRequestBody(
              matchingJsonPath(
                  String.format("$[?(@.logRequest[1].logSourceName == \"%s\")]", TEST_NAME))));
  assertEquals(BackendResponse.ok(3), response);
}
@Test
public void send_withEventsOfUnsupportedEncoding_shouldBeSkipped() throws IOException {
  stubFor(
      post(urlEqualTo("/api"))
          .willReturn(
              aResponse()
                  .withStatus(200)
                  .withHeader("Content-Type", "application/json;charset=UTF8;")
                  .withBody("{\"nextRequestWaitMillis\":3}")));
  // First event uses a "yaml" encoding the backend does not understand; the second
  // uses the supported protobuf encoding.
  BackendRequest request =
      BackendRequest.builder()
          .setEvents(
              Arrays.asList(
                  BACKEND.decorate(
                      EventInternal.builder()
                          .setEventMillis(INITIAL_WALL_TIME)
                          .setUptimeMillis(INITIAL_UPTIME)
                          .setTransportName("3")
                          .setEncodedPayload(
                              new EncodedPayload(Encoding.of("yaml"), PAYLOAD.toByteArray()))
                          .build()),
                  BACKEND.decorate(
                      EventInternal.builder()
                          .setEventMillis(INITIAL_WALL_TIME)
                          .setUptimeMillis(INITIAL_UPTIME)
                          .setTransportName(TEST_NAME)
                          .setEncodedPayload(
                              new EncodedPayload(PROTOBUF_ENCODING, PAYLOAD.toByteArray()))
                          .setCode(CODE)
                          .build())))
          .setExtras(new CCTDestination(TEST_ENDPOINT, null).getExtras())
          .build();
  wallClock.tick();
  uptimeClock.tick();
  BackendResponse result = BACKEND.send(request);
  // The unsupported event is dropped (its logRequest has no logEvent entry) while
  // the protobuf event goes through.
  verify(
      postRequestedFor(urlEqualTo("/api"))
          .withHeader("Content-Type", equalTo("application/json"))
          .withHeader("Content-Encoding", equalTo("gzip"))
          .withRequestBody(matchingJsonPath("$[?(@.logRequest.size() == 2)]"))
          .withRequestBody(matchingJsonPath("$[?(@.logRequest[0].logSource == 3)]"))
          .withRequestBody(notMatching("$[?(@.logRequest[0].logEvent)]"))
          .withRequestBody(
              matchingJsonPath(
                  String.format("$[?(@.logRequest[1].logSourceName == \"%s\")]", TEST_NAME)))
          .withRequestBody(matchingJsonPath("$[?(@.logRequest[1].logEvent.size() == 1)]")));
  assertEquals(BackendResponse.ok(3), result);
}
// When there is no active network, the ConnectivityManager returns null when
// getActiveNetworkInfo() is called.
// Robolectric shadow installed via @Config in the offline decorate test to
// simulate a device with no connectivity.
@Implements(ConnectivityManager.class)
public static class OfflineConnectivityManagerShadow {
@Implementation
public NetworkInfo getActiveNetworkInfo() {
return null;
}
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v8/services/campaign_draft_service.proto
package com.google.ads.googleads.v8.services;
/**
* <pre>
* Request message for [CampaignDraftService.MutateCampaignDrafts][google.ads.googleads.v8.services.CampaignDraftService.MutateCampaignDrafts].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v8.services.MutateCampaignDraftsRequest}
*/
public final class MutateCampaignDraftsRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v8.services.MutateCampaignDraftsRequest)
MutateCampaignDraftsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use MutateCampaignDraftsRequest.newBuilder() to construct.
private MutateCampaignDraftsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor: initializes fields to their proto3 defaults
// (empty string, empty list, enum value 0).
private MutateCampaignDraftsRequest() {
customerId_ = "";
operations_ = java.util.Collections.emptyList();
responseContentType_ = 0;
}
// Generated hook used by the protobuf runtime to create fresh instances reflectively.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new MutateCampaignDraftsRequest();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor (generated): reads tag/value pairs from the
// input stream until end-of-message (tag 0), accumulating unknown fields.
private MutateCampaignDraftsRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
// tag 10 = field 1 (customer_id), length-delimited UTF-8 string.
case 10: {
java.lang.String s = input.readStringRequireUtf8();
customerId_ = s;
break;
}
// tag 18 = field 2 (operations), repeated message; the list is created
// lazily on first element and bit 0 records that it is mutable.
case 18: {
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
operations_ = new java.util.ArrayList<com.google.ads.googleads.v8.services.CampaignDraftOperation>();
mutable_bitField0_ |= 0x00000001;
}
operations_.add(
input.readMessage(com.google.ads.googleads.v8.services.CampaignDraftOperation.parser(), extensionRegistry));
break;
}
// tag 24 = field 3 (partial_failure), varint bool.
case 24: {
partialFailure_ = input.readBool();
break;
}
// tag 32 = field 4 (validate_only), varint bool.
case 32: {
validateOnly_ = input.readBool();
break;
}
// tag 40 = field 5 (response_content_type), enum stored as raw int.
case 40: {
int rawValue = input.readEnum();
responseContentType_ = rawValue;
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Seal the operations list and unknown fields even on parse failure.
if (((mutable_bitField0_ & 0x00000001) != 0)) {
operations_ = java.util.Collections.unmodifiableList(operations_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor accessors wired to the generated CampaignDraftServiceProto holder.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v8.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v8_services_MutateCampaignDraftsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v8.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v8_services_MutateCampaignDraftsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest.class, com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest.Builder.class);
}
public static final int CUSTOMER_ID_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; conversions below memoize in place.
private volatile java.lang.Object customerId_;
/**
 * <pre>
 * Required. The ID of the customer whose campaign drafts are being modified.
 * </pre>
 *
 * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 * @return The customerId.
 */
@java.lang.Override
public java.lang.String getCustomerId() {
java.lang.Object ref = customerId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Lazily decode the cached ByteString and memoize the String form.
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
customerId_ = s;
return s;
}
}
/**
 * <pre>
 * Required. The ID of the customer whose campaign drafts are being modified.
 * </pre>
 *
 * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 * @return The bytes for customerId.
 */
@java.lang.Override
public com.google.protobuf.ByteString
getCustomerIdBytes() {
java.lang.Object ref = customerId_;
if (ref instanceof java.lang.String) {
// Lazily encode the cached String and memoize the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
customerId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int OPERATIONS_FIELD_NUMBER = 2;
// Immutable after construction (sealed in the parsing constructor's finally block).
private java.util.List<com.google.ads.googleads.v8.services.CampaignDraftOperation> operations_;
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
@java.lang.Override
public java.util.List<com.google.ads.googleads.v8.services.CampaignDraftOperation> getOperationsList() {
return operations_;
}
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.ads.googleads.v8.services.CampaignDraftOperationOrBuilder>
getOperationsOrBuilderList() {
return operations_;
}
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
@java.lang.Override
public int getOperationsCount() {
return operations_.size();
}
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
@java.lang.Override
public com.google.ads.googleads.v8.services.CampaignDraftOperation getOperations(int index) {
return operations_.get(index);
}
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
@java.lang.Override
public com.google.ads.googleads.v8.services.CampaignDraftOperationOrBuilder getOperationsOrBuilder(
int index) {
return operations_.get(index);
}
public static final int PARTIAL_FAILURE_FIELD_NUMBER = 3;
private boolean partialFailure_;
/**
 * <pre>
 * If true, successful operations will be carried out and invalid
 * operations will return errors. If false, all operations will be carried
 * out in one transaction if and only if they are all valid.
 * Default is false.
 * </pre>
 *
 * <code>bool partial_failure = 3;</code>
 * @return The partialFailure.
 */
@java.lang.Override
public boolean getPartialFailure() {
return partialFailure_;
}
public static final int VALIDATE_ONLY_FIELD_NUMBER = 4;
private boolean validateOnly_;
/**
 * <pre>
 * If true, the request is validated but not executed. Only errors are
 * returned, not results.
 * </pre>
 *
 * <code>bool validate_only = 4;</code>
 * @return The validateOnly.
 */
@java.lang.Override
public boolean getValidateOnly() {
return validateOnly_;
}
public static final int RESPONSE_CONTENT_TYPE_FIELD_NUMBER = 5;
// Stored as the raw wire enum number so unrecognized values survive round-trips.
private int responseContentType_;
/**
 * <pre>
 * The response content type setting. Determines whether the mutable resource
 * or just the resource name should be returned post mutation.
 * </pre>
 *
 * <code>.google.ads.googleads.v8.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 5;</code>
 * @return The enum numeric value on the wire for responseContentType.
 */
@java.lang.Override public int getResponseContentTypeValue() {
return responseContentType_;
}
/**
 * <pre>
 * The response content type setting. Determines whether the mutable resource
 * or just the resource name should be returned post mutation.
 * </pre>
 *
 * <code>.google.ads.googleads.v8.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 5;</code>
 * @return The responseContentType.
 */
@java.lang.Override public com.google.ads.googleads.v8.enums.ResponseContentTypeEnum.ResponseContentType getResponseContentType() {
@SuppressWarnings("deprecation")
com.google.ads.googleads.v8.enums.ResponseContentTypeEnum.ResponseContentType result = com.google.ads.googleads.v8.enums.ResponseContentTypeEnum.ResponseContentType.valueOf(responseContentType_);
// Map numbers unknown to this generated code to UNRECOGNIZED rather than null.
return result == null ? com.google.ads.googleads.v8.enums.ResponseContentTypeEnum.ResponseContentType.UNRECOGNIZED : result;
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// proto3 message with no required sub-messages: always initialized.
memoizedIsInitialized = 1;
return true;
}
// Serializes the message in field-number order, skipping fields at their
// proto3 default values, then appends any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, customerId_);
}
for (int i = 0; i < operations_.size(); i++) {
output.writeMessage(2, operations_.get(i));
}
if (partialFailure_ != false) {
output.writeBool(3, partialFailure_);
}
if (validateOnly_ != false) {
output.writeBool(4, validateOnly_);
}
if (responseContentType_ != com.google.ads.googleads.v8.enums.ResponseContentTypeEnum.ResponseContentType.UNSPECIFIED.getNumber()) {
output.writeEnum(5, responseContentType_);
}
unknownFields.writeTo(output);
}
// Computes (and memoizes in memoizedSize) the byte size writeTo() will emit,
// mirroring its default-value-skipping logic field by field.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, customerId_);
}
for (int i = 0; i < operations_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, operations_.get(i));
}
if (partialFailure_ != false) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(3, partialFailure_);
}
if (validateOnly_ != false) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(4, validateOnly_);
}
if (responseContentType_ != com.google.ads.googleads.v8.enums.ResponseContentTypeEnum.ResponseContentType.UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(5, responseContentType_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Structural equality over all five fields plus unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest)) {
return super.equals(obj);
}
com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest other = (com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest) obj;
if (!getCustomerId()
.equals(other.getCustomerId())) return false;
if (!getOperationsList()
.equals(other.getOperationsList())) return false;
if (getPartialFailure()
!= other.getPartialFailure()) return false;
if (getValidateOnly()
!= other.getValidateOnly()) return false;
if (responseContentType_ != other.responseContentType_) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Hash over the same fields as equals(), memoized in memoizedHashCode
// (0 doubles as the "not yet computed" sentinel).
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + CUSTOMER_ID_FIELD_NUMBER;
hash = (53 * hash) + getCustomerId().hashCode();
if (getOperationsCount() > 0) {
hash = (37 * hash) + OPERATIONS_FIELD_NUMBER;
hash = (53 * hash) + getOperationsList().hashCode();
}
hash = (37 * hash) + PARTIAL_FAILURE_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
getPartialFailure());
hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
getValidateOnly());
hash = (37 * hash) + RESPONSE_CONTENT_TYPE_FIELD_NUMBER;
hash = (53 * hash) + responseContentType_;
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Generated static parse entry points: all overloads delegate either to PARSER
// directly (in-memory inputs) or to GeneratedMessageV3's IOException-translating
// stream helpers.
public static com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message body.
public static com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // The default instance yields an empty builder; any other instance is merged in.
  if (this == DEFAULT_INSTANCE) {
    return new Builder();
  }
  return new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  // Creates a builder attached to the given parent.
  return new Builder(parent);
}
/**
 * <pre>
 * Request message for [CampaignDraftService.MutateCampaignDrafts][google.ads.googleads.v8.services.CampaignDraftService.MutateCampaignDrafts].
 * </pre>
 *
 * Protobuf type {@code google.ads.googleads.v8.services.MutateCampaignDraftsRequest}
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v8.services.MutateCampaignDraftsRequest)
com.google.ads.googleads.v8.services.MutateCampaignDraftsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v8.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v8_services_MutateCampaignDraftsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v8.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v8_services_MutateCampaignDraftsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest.class, com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest.Builder.class);
}
// Construct using com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Pre-creates the operations field builder when the protobuf runtime is
// configured to always use field builders (GeneratedMessageV3.alwaysUseFieldBuilders).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getOperationsFieldBuilder();
}
}
// Resets every field to its proto default and clears the operations list
// (via the field builder when one exists, otherwise by dropping the local list).
@java.lang.Override
public Builder clear() {
super.clear();
customerId_ = "";
if (operationsBuilder_ == null) {
operations_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
operationsBuilder_.clear();
}
partialFailure_ = false;
validateOnly_ = false;
responseContentType_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v8.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v8_services_MutateCampaignDraftsRequest_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest getDefaultInstanceForType() {
return com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest build() {
com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies the builder state into a new message. The operations list is taken
// either from the local list (frozen to an unmodifiable view) or from the
// repeated-field builder when one has been created.
@java.lang.Override
public com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest buildPartial() {
com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest result = new com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest(this);
int from_bitField0_ = bitField0_;
result.customerId_ = customerId_;
if (operationsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
operations_ = java.util.Collections.unmodifiableList(operations_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.operations_ = operations_;
} else {
result.operations_ = operationsBuilder_.build();
}
result.partialFailure_ = partialFailure_;
result.validateOnly_ = validateOnly_;
result.responseContentType_ = responseContentType_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest) {
return mergeFrom((com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge; scalar fields are copied only when the source holds a
// non-default value, and the operations list is appended rather than replaced.
public Builder mergeFrom(com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest other) {
if (other == com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest.getDefaultInstance()) return this;
if (!other.getCustomerId().isEmpty()) {
customerId_ = other.customerId_;
onChanged();
}
if (operationsBuilder_ == null) {
if (!other.operations_.isEmpty()) {
if (operations_.isEmpty()) {
operations_ = other.operations_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureOperationsIsMutable();
operations_.addAll(other.operations_);
}
onChanged();
}
} else {
if (!other.operations_.isEmpty()) {
if (operationsBuilder_.isEmpty()) {
operationsBuilder_.dispose();
operationsBuilder_ = null;
operations_ = other.operations_;
bitField0_ = (bitField0_ & ~0x00000001);
operationsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getOperationsFieldBuilder() : null;
} else {
operationsBuilder_.addAllMessages(other.operations_);
}
}
}
if (other.getPartialFailure() != false) {
setPartialFailure(other.getPartialFailure());
}
if (other.getValidateOnly() != false) {
setValidateOnly(other.getValidateOnly());
}
if (other.responseContentType_ != 0) {
setResponseContentTypeValue(other.getResponseContentTypeValue());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses from the wire and merges the result into this builder; on a parse
// failure any partially-parsed message is still merged before rethrowing.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Bit 0x00000001 of bitField0_ tracks whether operations_ is a private mutable copy.
private int bitField0_;
private java.lang.Object customerId_ = "";
/**
 * <pre>
 * Required. The ID of the customer whose campaign drafts are being modified.
 * </pre>
 *
 * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 * @return The customerId.
 */
public java.lang.String getCustomerId() {
java.lang.Object ref = customerId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
customerId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <pre>
 * Required. The ID of the customer whose campaign drafts are being modified.
 * </pre>
 *
 * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 * @return The bytes for customerId.
 */
public com.google.protobuf.ByteString
getCustomerIdBytes() {
java.lang.Object ref = customerId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
customerId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <pre>
 * Required. The ID of the customer whose campaign drafts are being modified.
 * </pre>
 *
 * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 * @param value The customerId to set.
 * @return This builder for chaining.
 */
public Builder setCustomerId(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
customerId_ = value;
onChanged();
return this;
}
/**
 * <pre>
 * Required. The ID of the customer whose campaign drafts are being modified.
 * </pre>
 *
 * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 * @return This builder for chaining.
 */
public Builder clearCustomerId() {
customerId_ = getDefaultInstance().getCustomerId();
onChanged();
return this;
}
/**
 * <pre>
 * Required. The ID of the customer whose campaign drafts are being modified.
 * </pre>
 *
 * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 * @param value The bytes for customerId to set.
 * @return This builder for chaining.
 */
public Builder setCustomerIdBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
customerId_ = value;
onChanged();
return this;
}
private java.util.List<com.google.ads.googleads.v8.services.CampaignDraftOperation> operations_ =
java.util.Collections.emptyList();
// Copies operations_ into a private ArrayList the first time the list is mutated.
private void ensureOperationsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
operations_ = new java.util.ArrayList<com.google.ads.googleads.v8.services.CampaignDraftOperation>(operations_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v8.services.CampaignDraftOperation, com.google.ads.googleads.v8.services.CampaignDraftOperation.Builder, com.google.ads.googleads.v8.services.CampaignDraftOperationOrBuilder> operationsBuilder_;
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public java.util.List<com.google.ads.googleads.v8.services.CampaignDraftOperation> getOperationsList() {
if (operationsBuilder_ == null) {
return java.util.Collections.unmodifiableList(operations_);
} else {
return operationsBuilder_.getMessageList();
}
}
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public int getOperationsCount() {
if (operationsBuilder_ == null) {
return operations_.size();
} else {
return operationsBuilder_.getCount();
}
}
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public com.google.ads.googleads.v8.services.CampaignDraftOperation getOperations(int index) {
if (operationsBuilder_ == null) {
return operations_.get(index);
} else {
return operationsBuilder_.getMessage(index);
}
}
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public Builder setOperations(
int index, com.google.ads.googleads.v8.services.CampaignDraftOperation value) {
if (operationsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOperationsIsMutable();
operations_.set(index, value);
onChanged();
} else {
operationsBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public Builder setOperations(
int index, com.google.ads.googleads.v8.services.CampaignDraftOperation.Builder builderForValue) {
if (operationsBuilder_ == null) {
ensureOperationsIsMutable();
operations_.set(index, builderForValue.build());
onChanged();
} else {
operationsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public Builder addOperations(com.google.ads.googleads.v8.services.CampaignDraftOperation value) {
if (operationsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOperationsIsMutable();
operations_.add(value);
onChanged();
} else {
operationsBuilder_.addMessage(value);
}
return this;
}
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public Builder addOperations(
int index, com.google.ads.googleads.v8.services.CampaignDraftOperation value) {
if (operationsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOperationsIsMutable();
operations_.add(index, value);
onChanged();
} else {
operationsBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public Builder addOperations(
com.google.ads.googleads.v8.services.CampaignDraftOperation.Builder builderForValue) {
if (operationsBuilder_ == null) {
ensureOperationsIsMutable();
operations_.add(builderForValue.build());
onChanged();
} else {
operationsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public Builder addOperations(
int index, com.google.ads.googleads.v8.services.CampaignDraftOperation.Builder builderForValue) {
if (operationsBuilder_ == null) {
ensureOperationsIsMutable();
operations_.add(index, builderForValue.build());
onChanged();
} else {
operationsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public Builder addAllOperations(
java.lang.Iterable<? extends com.google.ads.googleads.v8.services.CampaignDraftOperation> values) {
if (operationsBuilder_ == null) {
ensureOperationsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, operations_);
onChanged();
} else {
operationsBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public Builder clearOperations() {
if (operationsBuilder_ == null) {
operations_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
operationsBuilder_.clear();
}
return this;
}
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public Builder removeOperations(int index) {
if (operationsBuilder_ == null) {
ensureOperationsIsMutable();
operations_.remove(index);
onChanged();
} else {
operationsBuilder_.remove(index);
}
return this;
}
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public com.google.ads.googleads.v8.services.CampaignDraftOperation.Builder getOperationsBuilder(
int index) {
return getOperationsFieldBuilder().getBuilder(index);
}
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public com.google.ads.googleads.v8.services.CampaignDraftOperationOrBuilder getOperationsOrBuilder(
int index) {
if (operationsBuilder_ == null) {
return operations_.get(index); } else {
return operationsBuilder_.getMessageOrBuilder(index);
}
}
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public java.util.List<? extends com.google.ads.googleads.v8.services.CampaignDraftOperationOrBuilder>
getOperationsOrBuilderList() {
if (operationsBuilder_ != null) {
return operationsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(operations_);
}
}
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public com.google.ads.googleads.v8.services.CampaignDraftOperation.Builder addOperationsBuilder() {
return getOperationsFieldBuilder().addBuilder(
com.google.ads.googleads.v8.services.CampaignDraftOperation.getDefaultInstance());
}
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public com.google.ads.googleads.v8.services.CampaignDraftOperation.Builder addOperationsBuilder(
int index) {
return getOperationsFieldBuilder().addBuilder(
index, com.google.ads.googleads.v8.services.CampaignDraftOperation.getDefaultInstance());
}
/**
 * <pre>
 * Required. The list of operations to perform on individual campaign drafts.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v8.services.CampaignDraftOperation operations = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public java.util.List<com.google.ads.googleads.v8.services.CampaignDraftOperation.Builder>
getOperationsBuilderList() {
return getOperationsFieldBuilder().getBuilderList();
}
// Lazily creates the repeated-field builder; once created, the operations list
// is managed by it and operations_ is released.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v8.services.CampaignDraftOperation, com.google.ads.googleads.v8.services.CampaignDraftOperation.Builder, com.google.ads.googleads.v8.services.CampaignDraftOperationOrBuilder>
getOperationsFieldBuilder() {
if (operationsBuilder_ == null) {
operationsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v8.services.CampaignDraftOperation, com.google.ads.googleads.v8.services.CampaignDraftOperation.Builder, com.google.ads.googleads.v8.services.CampaignDraftOperationOrBuilder>(
operations_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
operations_ = null;
}
return operationsBuilder_;
}
private boolean partialFailure_ ;
/**
 * <pre>
 * If true, successful operations will be carried out and invalid
 * operations will return errors. If false, all operations will be carried
 * out in one transaction if and only if they are all valid.
 * Default is false.
 * </pre>
 *
 * <code>bool partial_failure = 3;</code>
 * @return The partialFailure.
 */
@java.lang.Override
public boolean getPartialFailure() {
return partialFailure_;
}
/**
 * <pre>
 * If true, successful operations will be carried out and invalid
 * operations will return errors. If false, all operations will be carried
 * out in one transaction if and only if they are all valid.
 * Default is false.
 * </pre>
 *
 * <code>bool partial_failure = 3;</code>
 * @param value The partialFailure to set.
 * @return This builder for chaining.
 */
public Builder setPartialFailure(boolean value) {
partialFailure_ = value;
onChanged();
return this;
}
/**
 * <pre>
 * If true, successful operations will be carried out and invalid
 * operations will return errors. If false, all operations will be carried
 * out in one transaction if and only if they are all valid.
 * Default is false.
 * </pre>
 *
 * <code>bool partial_failure = 3;</code>
 * @return This builder for chaining.
 */
public Builder clearPartialFailure() {
partialFailure_ = false;
onChanged();
return this;
}
private boolean validateOnly_ ;
/**
 * <pre>
 * If true, the request is validated but not executed. Only errors are
 * returned, not results.
 * </pre>
 *
 * <code>bool validate_only = 4;</code>
 * @return The validateOnly.
 */
@java.lang.Override
public boolean getValidateOnly() {
return validateOnly_;
}
/**
 * <pre>
 * If true, the request is validated but not executed. Only errors are
 * returned, not results.
 * </pre>
 *
 * <code>bool validate_only = 4;</code>
 * @param value The validateOnly to set.
 * @return This builder for chaining.
 */
public Builder setValidateOnly(boolean value) {
validateOnly_ = value;
onChanged();
return this;
}
/**
 * <pre>
 * If true, the request is validated but not executed. Only errors are
 * returned, not results.
 * </pre>
 *
 * <code>bool validate_only = 4;</code>
 * @return This builder for chaining.
 */
public Builder clearValidateOnly() {
validateOnly_ = false;
onChanged();
return this;
}
// Stored as the raw wire value so unrecognized enum numbers are preserved.
private int responseContentType_ = 0;
/**
 * <pre>
 * The response content type setting. Determines whether the mutable resource
 * or just the resource name should be returned post mutation.
 * </pre>
 *
 * <code>.google.ads.googleads.v8.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 5;</code>
 * @return The enum numeric value on the wire for responseContentType.
 */
@java.lang.Override public int getResponseContentTypeValue() {
return responseContentType_;
}
/**
 * <pre>
 * The response content type setting. Determines whether the mutable resource
 * or just the resource name should be returned post mutation.
 * </pre>
 *
 * <code>.google.ads.googleads.v8.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 5;</code>
 * @param value The enum numeric value on the wire for responseContentType to set.
 * @return This builder for chaining.
 */
public Builder setResponseContentTypeValue(int value) {
responseContentType_ = value;
onChanged();
return this;
}
/**
 * <pre>
 * The response content type setting. Determines whether the mutable resource
 * or just the resource name should be returned post mutation.
 * </pre>
 *
 * <code>.google.ads.googleads.v8.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 5;</code>
 * @return The responseContentType.
 */
@java.lang.Override
public com.google.ads.googleads.v8.enums.ResponseContentTypeEnum.ResponseContentType getResponseContentType() {
@SuppressWarnings("deprecation")
com.google.ads.googleads.v8.enums.ResponseContentTypeEnum.ResponseContentType result = com.google.ads.googleads.v8.enums.ResponseContentTypeEnum.ResponseContentType.valueOf(responseContentType_);
return result == null ? com.google.ads.googleads.v8.enums.ResponseContentTypeEnum.ResponseContentType.UNRECOGNIZED : result;
}
/**
 * <pre>
 * The response content type setting. Determines whether the mutable resource
 * or just the resource name should be returned post mutation.
 * </pre>
 *
 * <code>.google.ads.googleads.v8.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 5;</code>
 * @param value The responseContentType to set.
 * @return This builder for chaining.
 */
public Builder setResponseContentType(com.google.ads.googleads.v8.enums.ResponseContentTypeEnum.ResponseContentType value) {
if (value == null) {
throw new NullPointerException();
}
responseContentType_ = value.getNumber();
onChanged();
return this;
}
/**
 * <pre>
 * The response content type setting. Determines whether the mutable resource
 * or just the resource name should be returned post mutation.
 * </pre>
 *
 * <code>.google.ads.googleads.v8.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 5;</code>
 * @return This builder for chaining.
 */
public Builder clearResponseContentType() {
responseContentType_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v8.services.MutateCampaignDraftsRequest)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v8.services.MutateCampaignDraftsRequest)
private static final com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest();
}
/** Returns the shared default instance of this message type. */
public static com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parser used by the static parseFrom overloads, parser() and getParserForType().
private static final com.google.protobuf.Parser<MutateCampaignDraftsRequest> PARSER =
    new com.google.protobuf.AbstractParser<MutateCampaignDraftsRequest>() {
      @java.lang.Override
      public MutateCampaignDraftsRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new MutateCampaignDraftsRequest(input, extensionRegistry);
      }
    };
/** Returns the parser for this message type. */
public static com.google.protobuf.Parser<MutateCampaignDraftsRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<MutateCampaignDraftsRequest> getParserForType() {
  // Same parser instance as the static parser() accessor.
  return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v8.services.MutateCampaignDraftsRequest getDefaultInstanceForType() {
  // Instance-level accessor mirroring the static getDefaultInstance().
  return DEFAULT_INSTANCE;
}
}
/* Generic definitions */
/* Assertions (useful to generate conditional code) */
/* Current type and class (and size, if applicable) */
/* Value methods */
/* Interfaces (keys) */
/* Interfaces (values) */
/* Abstract implementations (keys) */
/* Abstract implementations (values) */
/* Static containers (keys) */
/* Static containers (values) */
/* Implementations */
/* Synchronized wrappers */
/* Unmodifiable wrappers */
/* Other wrappers */
/* Methods (keys) */
/* Methods (values) */
/* Methods (keys/values) */
/* Methods that have special names depending on keys (but the special names depend on values) */
/* Equality */
/* Object/Reference-only definitions (keys) */
/* Primitive-type-only definitions (keys) */
/* Object/Reference-only definitions (values) */
/* Primitive-type-only definitions (values) */
/*
* Copyright (C) 2002-2013 Sebastiano Vigna
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.unimi.dsi.fastutil.longs;
import it.unimi.dsi.fastutil.Hash;
import it.unimi.dsi.fastutil.HashCommon;
import it.unimi.dsi.fastutil.booleans.BooleanArrays;
import static it.unimi.dsi.fastutil.HashCommon.arraySize;
import static it.unimi.dsi.fastutil.HashCommon.maxFill;
import java.util.Map;
import java.util.NoSuchElementException;
import it.unimi.dsi.fastutil.shorts.ShortCollection;
import it.unimi.dsi.fastutil.shorts.AbstractShortCollection;
import it.unimi.dsi.fastutil.shorts.ShortIterator;
import it.unimi.dsi.fastutil.objects.AbstractObjectSet;
import it.unimi.dsi.fastutil.objects.ObjectIterator;
/** A type-specific hash map with a fast, small-footprint implementation whose {@linkplain it.unimi.dsi.fastutil.Hash.Strategy hashing strategy}
* is specified at creation time.
*
* <P>Instances of this class use a hash table to represent a map. The table is
* enlarged as needed by doubling its size when new entries are created, but it is <em>never</em> made
* smaller (even on a {@link #clear()}). A family of {@linkplain #trim() trimming
* methods} lets you control the size of the table; this is particularly useful
* if you reuse instances of this class.
*
* <p><strong>Warning:</strong> The implementation of this class has significantly
* changed in <code>fastutil</code> 6.1.0. Please read the
* comments about this issue in the section “Faster Hash Tables” of the <a href="../../../../../overview-summary.html">overview</a>.
*
* @see Hash
* @see HashCommon
*/
public class Long2ShortOpenCustomHashMap extends AbstractLong2ShortMap implements java.io.Serializable, Cloneable, Hash {
private static final long serialVersionUID = 0L;
private static final boolean ASSERTS = false;
/** The array of keys. */
protected transient long key[];
/** The array of values. */
protected transient short value[];
/** The array telling whether a position is used. */
protected transient boolean used[];
/** The acceptable load factor. */
protected final float f;
/** The current table size. */
protected transient int n;
/** Threshold after which we rehash. It must be the table size times {@link #f}. */
protected transient int maxFill;
/** The mask for wrapping a position counter. */
protected transient int mask;
/** Number of entries in the set. */
protected int size;
/** Cached set of entries. */
protected transient volatile FastEntrySet entries;
/** Cached set of keys. */
protected transient volatile LongSet keys;
/** Cached collection of values. */
protected transient volatile ShortCollection values;
/** The hash strategy of this custom map. */
protected it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy;
/**
 * Creates a new hash map using the given hashing strategy.
 *
 * <p>The actual table size will be the least power of two greater than <code>expected</code>/<code>f</code>.
 *
 * @param expected the expected number of elements in the hash set.
 * @param f the load factor; must be in (0,&nbsp;1].
 * @param strategy the strategy used to hash and compare keys.
 */
@SuppressWarnings("unchecked")
public Long2ShortOpenCustomHashMap( final int expected, final float f, final it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy ) {
    // Validate arguments before touching any state.
    if ( f <= 0 || f > 1 ) throw new IllegalArgumentException( "Load factor must be greater than 0 and smaller than or equal to 1" );
    if ( expected < 0 ) throw new IllegalArgumentException( "The expected number of elements must be nonnegative" );
    this.strategy = strategy;
    this.f = f;
    // Table size is a power of two, so (n - 1) works as a position mask.
    n = arraySize( expected, f );
    mask = n - 1;
    maxFill = maxFill( n, f );
    key = new long[ n ];
    value = new short[ n ];
    used = new boolean[ n ];
}
/**
 * Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
 *
 * @param expected the expected number of elements in the hash map.
 * @param strategy the strategy used to hash and compare keys.
 */
public Long2ShortOpenCustomHashMap( final int expected, final it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy ) {
    // Delegate to the fully-parameterized constructor with the default load factor.
    this( expected, DEFAULT_LOAD_FACTOR, strategy );
}
/** Creates a new hash map with initial expected {@link Hash#DEFAULT_INITIAL_SIZE} entries
* and {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
* @param strategy the strategy.
*/
public Long2ShortOpenCustomHashMap( final it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy ) {
this( DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR, strategy );
}
/** Creates a new hash map copying a given one.
*
* @param m a {@link Map} to be copied into the new hash map.
* @param f the load factor.
* @param strategy the strategy.
*/
public Long2ShortOpenCustomHashMap( final Map<? extends Long, ? extends Short> m, final float f, final it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy ) {
this( m.size(), f, strategy );
putAll( m );
}
/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor copying a given one.
*
* @param m a {@link Map} to be copied into the new hash map.
* @param strategy the strategy.
*/
public Long2ShortOpenCustomHashMap( final Map<? extends Long, ? extends Short> m, final it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy ) {
this( m, DEFAULT_LOAD_FACTOR, strategy );
}
/** Creates a new hash map copying a given type-specific one.
*
* @param m a type-specific map to be copied into the new hash map.
* @param f the load factor.
* @param strategy the strategy.
*/
public Long2ShortOpenCustomHashMap( final Long2ShortMap m, final float f, final it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy ) {
this( m.size(), f, strategy );
putAll( m );
}
/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor copying a given type-specific one.
*
* @param m a type-specific map to be copied into the new hash map.
* @param strategy the strategy.
*/
public Long2ShortOpenCustomHashMap( final Long2ShortMap m, final it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy ) {
this( m, DEFAULT_LOAD_FACTOR, strategy );
}
/** Creates a new hash map using the elements of two parallel arrays.
*
* @param k the array of keys of the new hash map.
* @param v the array of corresponding values in the new hash map.
* @param f the load factor.
* @param strategy the strategy.
* @throws IllegalArgumentException if <code>k</code> and <code>v</code> have different lengths.
*/
public Long2ShortOpenCustomHashMap( final long[] k, final short v[], final float f, final it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy ) {
this( k.length, f, strategy );
if ( k.length != v.length ) throw new IllegalArgumentException( "The key array and the value array have different lengths (" + k.length + " and " + v.length + ")" );
for( int i = 0; i < k.length; i++ ) this.put( k[ i ], v[ i ] );
}
/** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using the elements of two parallel arrays.
*
* @param k the array of keys of the new hash map.
* @param v the array of corresponding values in the new hash map.
* @param strategy the strategy.
* @throws IllegalArgumentException if <code>k</code> and <code>v</code> have different lengths.
*/
public Long2ShortOpenCustomHashMap( final long[] k, final short v[], final it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy ) {
this( k, v, DEFAULT_LOAD_FACTOR, strategy );
}
/** Returns the hashing strategy used by this custom hash map.
 *
 * @return the hashing strategy of this custom hash map.
 */
public it.unimi.dsi.fastutil.longs.LongHash.Strategy strategy() {
 return this.strategy;
}
/*
 * The following methods implements some basic building blocks used by
 * all accessors. They are (and should be maintained) identical to those used in OpenHashSet.drv.
 */
/** Adds a key/value pair to this map, replacing any existing mapping for the key.
 *
 * <p>Linear probing: starting from the key's hash slot, scan forward (wrapping via
 * {@link #mask}) until either the key or a free slot is found.
 *
 * @param k the key.
 * @param v the value.
 * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
 */
public short put(final long k, final short v) {
 // The starting point.
 int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
 // There's always an unused entry.
 while( used[ pos ] ) {
  if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) {
   final short oldValue = value[ pos ];
   value[ pos ] = v;
   return oldValue;
  }
  pos = ( pos + 1 ) & mask;
 }
 used[ pos ] = true;
 key[ pos ] = k;
 value[ pos ] = v;
 // Grow the table once the fill threshold is reached.
 if ( ++size >= maxFill ) rehash( arraySize( size + 1, f ) );
 if ( ASSERTS ) checkTable();
 return defRetValue;
}
/** Boxed variant of {@link #put(long, short)}; returns {@code null} (not the default
 * return value) when the key was absent, per the {@link Map} contract. */
public Short put( final Long ok, final Short ov ) {
 final short v = ((ov).shortValue());
 final long k = ((ok).longValue());
 // The starting point.
 int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
 // There's always an unused entry.
 while( used[ pos ] ) {
  if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) {
   final Short oldValue = (Short.valueOf(value[ pos ]));
   value[ pos ] = v;
   return oldValue;
  }
  pos = ( pos + 1 ) & mask;
 }
 used[ pos ] = true;
 key[ pos ] = k;
 value[ pos ] = v;
 if ( ++size >= maxFill ) rehash( arraySize( size + 1, f ) );
 if ( ASSERTS ) checkTable();
 return (null);
}
/** Adds an increment to value currently associated with a key.
 *
 * @param k the key.
 * @param incr the increment.
 * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
 * @deprecated use <code>addTo()</code> instead; having the same name of a {@link java.util.Set} method turned out to be a recipe for disaster.
 */
@Deprecated
public short add(final long k, final short incr) {
 return addTo( k, incr );
}
/** Adds an increment to value currently associated with a key.
 *
 * <P>Note that this method respects the {@linkplain #defaultReturnValue() default return value} semantics: when
 * called with a key that does not currently appears in the map, the key
 * will be associated with the default return value plus
 * the given increment.
 *
 * @param k the key.
 * @param incr the increment.
 * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
 */
public short addTo(final long k, final short incr) {
 // The starting point.
 int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
 // There's always an unused entry.
 while( used[ pos ] ) {
  if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) {
   final short oldValue = value[ pos ];
   value[ pos ] += incr;
   return oldValue;
  }
  pos = ( pos + 1 ) & mask;
 }
 used[ pos ] = true;
 key[ pos ] = k;
 // Absent key: start from the default return value, as documented above.
 value[ pos ] = (short)(defRetValue + incr);
 if ( ++size >= maxFill ) rehash( arraySize( size + 1, f ) );
 if ( ASSERTS ) checkTable();
 return defRetValue;
}
/** Shifts left entries with the specified hash code, starting at the specified position,
 * and empties the resulting free entry.
 *
 * <p>This is the standard deletion procedure for linear-probing tables: entries that
 * probed past the removed slot are moved back so lookups never hit a spurious hole.
 *
 * @param pos a starting position.
 * @return the position cleared by the shifting process.
 */
protected final int shiftKeys( int pos ) {
 // Shift entries with the same hash.
 int last, slot;
 for(;;) {
  pos = ( ( last = pos ) + 1 ) & mask;
  while( used[ pos ] ) {
   slot = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(key[ pos ]) ) ) & mask;
   // Stop when the candidate's home slot does not lie in the (wrapped)
   // interval (last, pos]: moving it to 'last' would be legal.
   if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break;
   pos = ( pos + 1 ) & mask;
  }
  if ( ! used[ pos ] ) break;
  key[ last ] = key[ pos ];
  value[ last ] = value[ pos ];
 }
 used[ last ] = false;
 return last;
}
/** Removes the mapping for the given key, if present.
 *
 * @param k the key.
 * @return the removed value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
 */
@SuppressWarnings("unchecked")
public short remove( final long k ) {
 // The starting point.
 int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
 // There's always an unused entry.
 while( used[ pos ] ) {
  if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) {
   size--;
   final short v = value[ pos ];
   // Restore the probe-sequence invariant after the deletion.
   shiftKeys( pos );
   return v;
  }
  pos = ( pos + 1 ) & mask;
 }
 return defRetValue;
}
/** Boxed variant of {@link #remove(long)}; returns {@code null} when the key was absent.
 * Note that a non-{@link Long} argument causes a {@link ClassCastException}. */
@SuppressWarnings("unchecked")
public Short remove( final Object ok ) {
 final long k = ((((Long)(ok)).longValue()));
 // The starting point.
 int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
 // There's always an unused entry.
 while( used[ pos ] ) {
  if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) {
   size--;
   final short v = value[ pos ];
   shiftKeys( pos );
   return (Short.valueOf(v));
  }
  pos = ( pos + 1 ) & mask;
 }
 return (null);
}
/** Boxed variant of {@link #get(long)}; returns {@code null} when the key is absent. */
public Short get( final Long ok ) {
 final long k = ((ok).longValue());
 // The starting point.
 int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode( k) ) ) & mask;
 // There's always an unused entry.
 while( used[ pos ] ) {
  if ( ( strategy.equals( (key[ pos ]), ( k) ) ) ) return (Short.valueOf(value[ pos ]));
  pos = ( pos + 1 ) & mask;
 }
 return (null);
}
/** Returns the value associated with the given key.
 *
 * @param k the key.
 * @return the associated value, or the {@linkplain #defaultReturnValue() default return value} if the key is absent.
 */
@SuppressWarnings("unchecked")
public short get( final long k ) {
 // The starting point.
 int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
 // There's always an unused entry.
 while( used[ pos ] ) {
  if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) return value[ pos ];
  pos = ( pos + 1 ) & mask;
 }
 return defRetValue;
}
/** Returns true if this map contains a mapping for the given key. */
@SuppressWarnings("unchecked")
public boolean containsKey( final long k ) {
 // The starting point.
 int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
 // There's always an unused entry.
 while( used[ pos ] ) {
  if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) return true;
  pos = ( pos + 1 ) & mask;
 }
 return false;
}
/** Returns true if some key maps to the given value.
 *
 * <p>Values are not indexed, so this is a full linear scan of the table.
 */
public boolean containsValue( final short v ) {
 final short[] vals = this.value;
 final boolean[] occupied = this.used;
 for ( int i = 0; i < n; i++ ) {
  if ( occupied[ i ] && vals[ i ] == v ) return true;
 }
 return false;
}
/* Removes all elements from this map.
 *
 * <P>To increase object reuse, this method does not change the table size.
 * If you want to reduce the table size, you must use {@link #trim()}.
 *
 */
public void clear() {
 if ( size == 0 ) return;
 size = 0;
 BooleanArrays.fill( used, false );
 // Keys and values are primitives, so there are no object references to null out;
 // clearing the 'used' flags is sufficient.
}
/** Returns the number of key/value mappings in this map. */
public int size() {
 return size;
}
/** Returns true if this map contains no mappings. */
public boolean isEmpty() {
 return size == 0;
}
/** A no-op for backward compatibility.
 *
 * @param growthFactor unused.
 * @deprecated Since <code>fastutil</code> 6.1.0, hash tables are doubled when they are too full.
 */
@Deprecated
public void growthFactor( int growthFactor ) {}
/** Gets the growth factor.
 *
 * <p>NOTE(review): the original javadoc claimed the growth factor is fixed at 2,
 * but this implementation returns 16 — confirm which is intended before relying
 * on the value. The method is deprecated and purely informational either way.
 *
 * @return the value 16 returned for backward compatibility.
 * @see #growthFactor(int)
 * @deprecated Since <code>fastutil</code> 6.1.0, hash tables are doubled when they are too full.
 */
@Deprecated
public int growthFactor() {
 return 16;
}
/** The entry class for a hash map does not record key and value, but
 * rather the position in the hash table of the corresponding entry. This
 * is necessary so that calls to {@link java.util.Map.Entry#setValue(Object)} are reflected in
 * the map */
private final class MapEntry implements Long2ShortMap.Entry , Map.Entry<Long, Short> {
 // The table index this entry refers to, or -1 if this entry has been deleted.
 private int index;
 MapEntry( final int index ) {
  this.index = index;
 }
 public Long getKey() {
  return (Long.valueOf(key[ index ]));
 }
 public long getLongKey() {
  return key[ index ];
 }
 public Short getValue() {
  return (Short.valueOf(value[ index ]));
 }
 public short getShortValue() {
  return value[ index ];
 }
 /** Writes through to the backing table; returns the previous value. */
 public short setValue( final short v ) {
  final short oldValue = value[ index ];
  value[ index ] = v;
  return oldValue;
 }
 public Short setValue( final Short v ) {
  return (Short.valueOf(setValue( ((v).shortValue()) )));
 }
 // NOTE(review): the unchecked cast means an entry with non-Long/non-Short
 // key or value throws ClassCastException here instead of returning false.
 @SuppressWarnings("unchecked")
 public boolean equals( final Object o ) {
  if (!(o instanceof Map.Entry)) return false;
  Map.Entry<Long, Short> e = (Map.Entry<Long, Short>)o;
  return ( strategy.equals( (key[ index ]), (((e.getKey()).longValue())) ) ) && ( (value[ index ]) == (((e.getValue()).shortValue())) );
 }
 public int hashCode() {
  return ( strategy.hashCode(key[ index ]) ) ^ (value[ index ]);
 }
 public String toString() {
  return key[ index ] + "=>" + value[ index ];
 }
}
/** An iterator over a hash map.
 *
 * <p>Scans the table downwards from the end; entries displaced past the iterator
 * by removals ("wrapped" entries) are collected and enumerated afterwards. */
private class MapIterator {
 /** The index of the next entry to be returned, if positive or zero. If negative, the next entry to be
 returned, if any, is that of index -pos -2 from the {@link #wrapped} list. */
 int pos = Long2ShortOpenCustomHashMap.this.n;
 /** The index of the last entry that has been returned. It is -1 if either
 we did not return an entry yet, or the last returned entry has been removed. */
 int last = -1;
 /** A downward counter measuring how many entries must still be returned. */
 int c = size;
 /** A lazily allocated list containing the keys of elements that have wrapped around the table because of removals; such elements
 would not be enumerated (other elements would be usually enumerated twice in their place). */
 LongArrayList wrapped;
 {
  // Instance initializer: position 'pos' on the last used slot, if any.
  final boolean used[] = Long2ShortOpenCustomHashMap.this.used;
  if ( c != 0 ) while( ! used[ --pos ] );
 }
 public boolean hasNext() {
  return c != 0;
 }
 /** Returns the table index of the next entry (never the entry itself). */
 public int nextEntry() {
  if ( ! hasNext() ) throw new NoSuchElementException();
  c--;
  // We are just enumerating elements from the wrapped list.
  if ( pos < 0 ) {
   final long k = wrapped.getLong( - ( last = --pos ) - 2 );
   // The starting point. Note: this local 'pos' deliberately shadows the field.
   int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
   // There's always an unused entry.
   while( used[ pos ] ) {
    if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) return pos;
    pos = ( pos + 1 ) & mask;
   }
  }
  last = pos;
  //System.err.println( "Count: " + c );
  if ( c != 0 ) {
   final boolean used[] = Long2ShortOpenCustomHashMap.this.used;
   while ( pos-- != 0 && !used[ pos ] );
   // When here pos < 0 there are no more elements to be enumerated by scanning, but wrapped might be nonempty.
  }
  return last;
 }
 /** Shifts left entries with the specified hash code, starting at the specified position,
  * and empties the resulting free entry. If any entry wraps around the table, instantiates
  * lazily {@link #wrapped} and stores the entry key.
  *
  * @param pos a starting position.
  * @return the position cleared by the shifting process.
  */
 protected final int shiftKeys( int pos ) {
  // Shift entries with the same hash.
  int last, slot;
  for(;;) {
   pos = ( ( last = pos ) + 1 ) & mask;
   while( used[ pos ] ) {
    slot = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(key[ pos ]) ) ) & mask;
    if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break;
    pos = ( pos + 1 ) & mask;
   }
   if ( ! used[ pos ] ) break;
   if ( pos < last ) {
    // Wrapped entry: remember its key so the iteration still visits it.
    if ( wrapped == null ) wrapped = new LongArrayList ();
    wrapped.add( key[ pos ] );
   }
   key[ last ] = key[ pos ];
   value[ last ] = value[ pos ];
  }
  used[ last ] = false;
  return last;
 }
 /** Removes the last returned entry, keeping the iteration consistent. */
 @SuppressWarnings("unchecked")
 public void remove() {
  if ( last == -1 ) throw new IllegalStateException();
  if ( pos < -1 ) {
   // We're removing wrapped entries.
   Long2ShortOpenCustomHashMap.this.remove( wrapped.getLong( - pos - 2 ) );
   last = -1;
   return;
  }
  size--;
  // If the shift moved an entry into the slot we are about to scan,
  // re-run nextEntry() so it is not skipped.
  if ( shiftKeys( last ) == pos && c > 0 ) {
   c++;
   nextEntry();
  }
  last = -1; // You can no longer remove this entry.
  if ( ASSERTS ) checkTable();
 }
 /** Skips up to {@code n} entries and returns how many were actually skipped. */
 public int skip( final int n ) {
  int i = n;
  while( i-- != 0 && hasNext() ) nextEntry();
  return n - i - 1;
 }
}
/** Entry iterator allocating a fresh {@link MapEntry} per call to {@code next()}. */
private class EntryIterator extends MapIterator implements ObjectIterator<Long2ShortMap.Entry > {
 private MapEntry entry;
 public Long2ShortMap.Entry next() {
  return entry = new MapEntry( nextEntry() );
 }
 @Override
 public void remove() {
  super.remove();
  entry.index = -1; // You cannot use a deleted entry.
 }
}
/** Allocation-free entry iterator: the SAME mutable entry is returned on every
 * call to {@code next()}, so callers must not retain it across iterations. */
private class FastEntryIterator extends MapIterator implements ObjectIterator<Long2ShortMap.Entry > {
 final BasicEntry entry = new BasicEntry ( ((long)0), ((short)0) );
 public BasicEntry next() {
  final int e = nextEntry();
  entry.key = key[ e ];
  entry.value = value[ e ];
  return entry;
 }
}
/** Entry-set view backed directly by the map's table. */
private final class MapEntrySet extends AbstractObjectSet<Long2ShortMap.Entry > implements FastEntrySet {
 public ObjectIterator<Long2ShortMap.Entry > iterator() {
  return new EntryIterator();
 }
 public ObjectIterator<Long2ShortMap.Entry > fastIterator() {
  return new FastEntryIterator();
 }
 @SuppressWarnings("unchecked")
 public boolean contains( final Object o ) {
  if ( !( o instanceof Map.Entry ) ) return false;
  final Map.Entry<Long, Short> e = (Map.Entry<Long, Short>)o;
  final long k = ((e.getKey()).longValue());
  // The starting point.
  int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
  // There's always an unused entry.
  while( used[ pos ] ) {
   // Key found: the entry is contained only if the value matches too.
   if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) return ( (value[ pos ]) == (((e.getValue()).shortValue())) );
   pos = ( pos + 1 ) & mask;
  }
  return false;
 }
 @SuppressWarnings("unchecked")
 public boolean remove( final Object o ) {
  if ( !( o instanceof Map.Entry ) ) return false;
  final Map.Entry<Long, Short> e = (Map.Entry<Long, Short>)o;
  final long k = ((e.getKey()).longValue());
  // The starting point.
  int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
  // There's always an unused entry.
  while( used[ pos ] ) {
   if ( ( strategy.equals( (key[ pos ]), (k) ) ) ) {
    // Delegates to the map's boxed remove (re-probes once more).
    // NOTE(review): removes by key only, without checking the entry's value.
    Long2ShortOpenCustomHashMap.this.remove( e.getKey() );
    return true;
   }
   pos = ( pos + 1 ) & mask;
  }
  return false;
 }
 public int size() {
  return size;
 }
 public void clear() {
  Long2ShortOpenCustomHashMap.this.clear();
 }
}
/** Returns the (lazily created, cached) fast entry-set view of this map. */
public FastEntrySet long2ShortEntrySet() {
 if ( entries == null ) entries = new MapEntrySet();
 return entries;
}
/** An iterator on keys.
 *
 * <P>We simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods
 * (and possibly their type-specific counterparts) so that they return keys
 * instead of entries.
 */
private final class KeyIterator extends MapIterator implements LongIterator {
 public KeyIterator() { super(); }
 public long nextLong() { return key[ nextEntry() ]; }
 public Long next() { return (Long.valueOf(key[ nextEntry() ])); }
}
/** Key-set view backed by the map; removals write through to the map. */
private final class KeySet extends AbstractLongSet {
 public LongIterator iterator() {
  return new KeyIterator();
 }
 public int size() {
  return size;
 }
 public boolean contains( long k ) {
  return containsKey( k );
 }
 public boolean remove( long k ) {
  // Detect removal via the size change: the mapped value alone cannot tell
  // an absent key apart from one mapped to the default return value.
  final int oldSize = size;
  Long2ShortOpenCustomHashMap.this.remove( k );
  return size != oldSize;
 }
 public void clear() {
  Long2ShortOpenCustomHashMap.this.clear();
 }
}
/** Returns the (lazily created, cached) key-set view of this map. */
public LongSet keySet() {
 if ( keys == null ) keys = new KeySet();
 return keys;
}
/** An iterator on values.
 *
 * <P>We simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods
 * (and possibly their type-specific counterparts) so that they return values
 * instead of entries.
 */
private final class ValueIterator extends MapIterator implements ShortIterator {
 public ValueIterator() { super(); }
 public short nextShort() { return value[ nextEntry() ]; }
 public Short next() { return (Short.valueOf(value[ nextEntry() ])); }
}
/** Returns the (lazily created, cached) value-collection view of this map. */
public ShortCollection values() {
 if ( values == null ) values = new AbstractShortCollection () {
  public ShortIterator iterator() {
   return new ValueIterator();
  }
  public int size() {
   return size;
  }
  public boolean contains( short v ) {
   return containsValue( v );
  }
  public void clear() {
   Long2ShortOpenCustomHashMap.this.clear();
  }
 };
 return values;
}
/** A no-op for backward compatibility. The kind of tables implemented by
 * this class never need rehashing.
 *
 * <P>If you need to reduce the table size to fit exactly
 * this set, use {@link #trim()}.
 *
 * @return true.
 * @see #trim()
 * @deprecated A no-op.
 */
@Deprecated
public boolean rehash() {
 return true;
}
/** Rehashes the map, making the table as small as possible.
 *
 * <P>This method rehashes the table to the smallest size satisfying the
 * load factor. It can be used when the set will not be changed anymore, so
 * to optimize access speed and size.
 *
 * <P>If the table size is already the minimum possible, this method
 * does nothing.
 *
 * @return true if there was enough memory to trim the map.
 * @see #trim(int)
 */
public boolean trim() {
 final int l = arraySize( size, f );
 // Already at (or below) the minimum size: nothing to do.
 if ( l >= n ) return true;
 try {
  rehash( l );
 }
 // Deliberate best-effort: failure to allocate is reported, not thrown.
 catch(OutOfMemoryError cantDoIt) { return false; }
 return true;
}
/** Rehashes this map if the table is too large.
 *
 * <P>Let <var>N</var> be the smallest table size that can hold
 * <code>max(n,{@link #size()})</code> entries, still satisfying the load factor. If the current
 * table size is smaller than or equal to <var>N</var>, this method does
 * nothing. Otherwise, it rehashes this map in a table of size
 * <var>N</var>.
 *
 * <P>This method is useful when reusing maps. {@linkplain #clear() Clearing a
 * map} leaves the table size untouched. If you are reusing a map
 * many times, you can call this method with a typical
 * size to avoid keeping around a very large table just
 * because of a few large transient maps.
 *
 * @param n the threshold for the trimming.
 * @return true if there was enough memory to trim the map.
 * @see #trim()
 */
public boolean trim( final int n ) {
 // Round the requested capacity up to a power of two honouring the load factor.
 final int l = HashCommon.nextPowerOfTwo( (int)Math.ceil( n / f ) );
 if ( this.n <= l ) return true;
 try {
  rehash( l );
 }
 // Deliberate best-effort: failure to allocate is reported, not thrown.
 catch( OutOfMemoryError cantDoIt ) { return false; }
 return true;
}
/** Resizes the map.
 *
 * <P>This method implements the basic rehashing strategy, and may be
 * overriden by subclasses implementing different rehashing strategies (e.g.,
 * disk-based rehashing). However, you should not override this method
 * unless you understand the internal workings of this class.
 *
 * @param newN the new size (must be a power of two).
 */
@SuppressWarnings("unchecked")
protected void rehash( final int newN ) {
 int i = 0, pos;
 final boolean used[] = this.used;
 long k;
 final long key[] = this.key;
 final short value[] = this.value;
 final int newMask = newN - 1;
 final long newKey[] = new long[ newN ];
 final short newValue[] = new short[newN];
 final boolean newUsed[] = new boolean[ newN ];
 // Reinsert exactly 'size' entries, scanning the old table left to right.
 for( int j = size; j-- != 0; ) {
  while( ! used[ i ] ) i++;
  k = key[ i ];
  pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & newMask;
  // Linear-probe for a free slot in the new table.
  while ( newUsed[ pos ] ) pos = ( pos + 1 ) & newMask;
  newUsed[ pos ] = true;
  newKey[ pos ] = k;
  newValue[ pos ] = value[ i ];
  i++;
 }
 n = newN;
 mask = newMask;
 maxFill = maxFill( n, f );
 this.key = newKey;
 this.value = newValue;
 this.used = newUsed;
}
/** Returns a deep copy of this map.
 *
 * <P>This method performs a deep copy of this hash map; the data stored in the
 * map, however, is not cloned. Note that this makes a difference only for object keys.
 *
 * @return a deep copy of this map.
 */
@SuppressWarnings("unchecked")
public Long2ShortOpenCustomHashMap clone() {
 Long2ShortOpenCustomHashMap c;
 try {
  c = (Long2ShortOpenCustomHashMap )super.clone();
 }
 catch(CloneNotSupportedException cantHappen) {
  // This class is cloneable via its hierarchy, so this cannot occur.
  throw new InternalError();
 }
 // Reset cached views: they must be recreated against the clone.
 c.keys = null;
 c.values = null;
 c.entries = null;
 c.key = key.clone();
 c.value = value.clone();
 c.used = used.clone();
 // The strategy itself is shared, not cloned.
 c.strategy = strategy;
 return c;
}
/** Returns a hash code for this map.
 *
 * This method overrides the generic method provided by the superclass.
 * Since <code>equals()</code> is not overridden, it is important
 * that the value returned by this method is the same value as
 * the one returned by the overridden method.
 *
 * @return a hash code for this map.
 */
public int hashCode() {
 int h = 0;
 // Sum per-entry hashes (strategy hash of the key XOR the value) over all entries.
 for( int j = size, i = 0, t = 0; j-- != 0; ) {
  while( ! used[ i ] ) i++;
  t = ( strategy.hashCode(key[ i ]) );
  t ^= (value[ i ]);
  h += t;
  i++;
 }
 return h;
}
/** Custom serialization: the transient table is written as a flat sequence of
 * key/value pairs in iteration order ({@code size} pairs). */
private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {
 final long key[] = this.key;
 final short value[] = this.value;
 final MapIterator i = new MapIterator();
 s.defaultWriteObject();
 for( int j = size, e; j-- != 0; ) {
  e = i.nextEntry();
  s.writeLong( key[ e ] );
  s.writeShort( value[ e ] );
 }
}
/** Custom deserialization: rebuilds the table (sized from the deserialized
 * {@code size} and {@code f}) and reinserts every key/value pair by probing. */
@SuppressWarnings("unchecked")
private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException {
 s.defaultReadObject();
 n = arraySize( size, f );
 maxFill = maxFill( n, f );
 mask = n - 1;
 final long key[] = this.key = new long[ n ];
 final short value[] = this.value = new short[ n ];
 final boolean used[] = this.used = new boolean[ n ];
 long k;
 short v;
 for( int i = size, pos = 0; i-- != 0; ) {
  k = s.readLong();
  v = s.readShort();
  pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode(k) ) ) & mask;
  while ( used[ pos ] ) pos = ( pos + 1 ) & mask;
  used[ pos ] = true;
  key[ pos ] = k;
  value[ pos ] = v;
 }
 if ( ASSERTS ) checkTable();
}
/** Internal consistency check; intentionally empty (enabled only when {@code ASSERTS} is true). */
private void checkTable() {}
}
| |
/*
* Copyright (c) 2015 git-afsantos
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.jtuples;
import java.util.function.Function;
/**
* Represents a mathematical ordered quintuple of objects.
* The order in which the objects appear in the quintuple is significant:
* the ordered quintuple {@code (a, b, c, d, e)} is different from the ordered
* quintuple {@code (b, a, c, d, e)} unless {@code a} and {@code b} are equal.
*
* @author Benjamim Sonntag
* @param <A> the type of the first element of the quintuple
* @param <B> the type of the second element of the quintuple
* @param <C> the type of the third element of the quintuple
* @param <D> the type of the fourth element of the quintuple
* @param <E> the type of the fifth element of the quintuple
*/
public final class Quintuple<A, B, C, D, E> extends AbstractTuple {
    private static final int ARITY = 5;
    private final A first;
    private final B second;
    private final C third;
    private final D fourth;
    private final E fifth;
    /**
     * Returns a new quintuple of {@code (null, null, null, null, null)}.
     */
    public Quintuple() {
        this(null, null, null, null, null);
    }
    /**
     * Returns a new ordered quintuple, containing the given objects.
     * @param first the first member of the ordered quintuple
     * @param second the second member of the ordered quintuple
     * @param third the third member of the ordered quintuple
     * @param fourth the fourth member of the ordered quintuple
     * @param fifth the fifth member of the ordered quintuple
     */
    public Quintuple(A first, B second, C third, D fourth, E fifth) {
        this.first = first;
        this.second = second;
        this.third = third;
        this.fourth = fourth;
        this.fifth = fifth;
    }
    /**
     * Returns the first member of this ordered quintuple.
     * @return the first member of the quintuple
     */
    public A first() {
        return first;
    }
    /**
     * Returns the second member of this ordered quintuple.
     * @return the second member of the quintuple
     */
    public B second() {
        return second;
    }
    /**
     * Returns the third member of this ordered quintuple.
     * @return the third member of the quintuple
     */
    public C third() {
        return third;
    }
    /**
     * Returns the fourth member of this ordered quintuple.
     * @return the fourth member of the quintuple
     */
    public D fourth() {
        return fourth;
    }
    /**
     * Returns the fifth member of this ordered quintuple.
     * @return the fifth member of the quintuple
     */
    public E fifth() {
        return fifth;
    }
    /**
     * Returns the constant {@code 5}.
     * The arity of a quintuple is defined to be 5.
     * @return the constant 5
     */
    @Override
    public int arity() {
        return ARITY;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public Quintuple<E, D, C, B, A> invert() {
        return new Quintuple<>(fifth(), fourth(), third(), second(), first());
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public Quintuple<B, C, D, E, A> shiftLeft() {
        return shiftLeft(first());
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public <V> Quintuple<B, C, D, E, V> shiftLeft(V value) {
        return new Quintuple<>(second(), third(), fourth(), fifth(), value);
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public Quintuple<E, A, B, C, D> shiftRight() {
        return shiftRight(fifth());
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public <V> Quintuple<V, A, B, C, D> shiftRight(V value) {
        return new Quintuple<>(value, first(), second(), third(), fourth());
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public Object[] toArray() {
        return new Object[] {
            first(), second(), third(), fourth(), fifth()
        };
    }
    /**
     * Returns a new quintuple, transforming the first member of this
     * quintuple.
     * The first member of the new quintuple is the result of applying the
     * given function to the first member of this quintuple.
     * The remaining members are preserved.
     * @param <R> the type of the function's result
     * @param function the function used to transform the first member
     * @return a quintuple with the result of the function as the first member
     */
    public <R> Quintuple<R, B, C, D, E> applyFirst(Function<A, R> function) {
        return new Quintuple<>(function.apply(first()),
                second(), third(), fourth(), fifth());
    }
    /**
     * Returns a new quintuple, transforming the second member of this
     * quintuple.
     * The second member of the new quintuple is the result of applying the
     * given function to the second member of this quintuple.
     * The remaining members are preserved.
     * @param <R> the type of the function's result
     * @param function the function used to transform the second member
     * @return a quintuple with the result of the function as the second member
     */
    public <R> Quintuple<A, R, C, D, E> applySecond(Function<B, R> function) {
        return new Quintuple<>(first(), function.apply(second()),
                third(), fourth(), fifth());
    }
    /**
     * Returns a new quintuple, transforming the third member of this
     * quintuple.
     * The third member of the new quintuple is the result of applying the
     * given function to the third member of this quintuple.
     * The remaining members are preserved.
     * @param <R> the type of the function's result
     * @param function the function used to transform the third member
     * @return a quintuple with the result of the function as the third member
     */
    public <R> Quintuple<A, B, R, D, E> applyThird(Function<C, R> function) {
        return new Quintuple<>(first(), second(), function.apply(third()),
                fourth(), fifth());
    }
    /**
     * Returns a new quintuple, transforming the fourth member of this
     * quintuple.
     * The fourth member of the new quintuple is the result of applying the
     * given function to the fourth member of this quintuple.
     * The remaining members are preserved.
     * @param <R> the type of the function's result
     * @param function the function used to transform the fourth member
     * @return a quintuple with the result of the function as the fourth member
     */
    public <R> Quintuple<A, B, C, R, E> applyFourth(Function<D, R> function) {
        return new Quintuple<>(first(), second(), third(),
                function.apply(fourth()), fifth());
    }
    /**
     * Returns a new quintuple, transforming the fifth member of this
     * quintuple.
     * The fifth member of the new quintuple is the result of applying the
     * given function to the fifth member of this quintuple.
     * The remaining members are preserved.
     * @param <R> the type of the function's result
     * @param function the function used to transform the fifth member
     * @return a quintuple with the result of the function as the fifth member
     */
    public <R> Quintuple<A, B, C, D, R> applyFifth(Function<E, R> function) {
        return new Quintuple<>(first(), second(), third(), fourth(),
                function.apply(fifth()));
    }
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is
* distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either
* express or implied. See the License for the specific language
* governing
* permissions and limitations under the License.
*/
package com.amazonaws.auth;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.Date;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.amazonaws.AmazonClientException;
/**
* Unit tests for the InstanceProfileCredentialsProvider.
*/
public class InstanceProfileCredentialsProviderTest {
/** One minute (in milliseconds) */
private static final long ONE_MINUTE = 1000L * 60;
private EC2MetadataServiceMock mockServer;
/** Starts up the mock EC2 Instance Metadata Service. */
@Before
public void setUp() throws Exception {
mockServer = new EC2MetadataServiceMock();
mockServer.start();
}
/** Shuts down the mock EC2 Instance Metadata Service. */
@After
public void tearDown() throws Exception {
mockServer.stop();
Thread.sleep(1000);
}
/** Tests that the credentials provider reloads credentials appropriately */
@Test
public void testNeedsToLoadCredentialsMethod() throws Exception {
TestInstanceProfileCredentialsProvider provider = new TestInstanceProfileCredentialsProvider();
// The provider should load credentials when it doesn't have any
assertNull(provider.getCurrentCredentials());
assertTrue(provider.needsToLoadCredentials());
// The provider should not refresh credentials when they aren't close to expiring and are recent
provider.setCredentials(new BasicAWSCredentials("foo", "bar"));
provider.setCredentialsExpiration(new Date(System.currentTimeMillis() + (ONE_MINUTE * 60 * 24)));
provider.setLastInstanceProfileCheck(new Date());
assertFalse(provider.needsToLoadCredentials());
// The provider should refresh credentials when they aren't close to expiring, but are more than an hour old
provider.setCredentials(new BasicAWSCredentials("foo", "bar"));
provider.setCredentialsExpiration(new Date(System.currentTimeMillis() + (ONE_MINUTE * 16)));
provider.setLastInstanceProfileCheck(new Date(System.currentTimeMillis() - (ONE_MINUTE * 61)));
assertTrue(provider.needsToLoadCredentials());
// The provider should refresh credentials when they are close to expiring
provider.setCredentials(new BasicAWSCredentials("foo", "bar"));
provider.setCredentialsExpiration(new Date(System.currentTimeMillis() + ONE_MINUTE * 14));
provider.setLastInstanceProfileCheck(new Date());
assertTrue(provider.needsToLoadCredentials());
}
private static class TestInstanceProfileCredentialsProvider extends InstanceProfileCredentialsProvider {
public AWSCredentials getCurrentCredentials() {
return this.credentials;
}
public void setCredentials(AWSCredentials credentials) {
this.credentials = credentials;
}
public void setCredentialsExpiration(Date credentialsExpiration) {
this.credentialsExpiration = credentialsExpiration;
}
public void setLastInstanceProfileCheck(Date lastInstanceProfileCheck) {
this.lastInstanceProfileCheck = lastInstanceProfileCheck;
}
public Date getLastInstanceProfileCheck() {
return this.lastInstanceProfileCheck;
}
}
/** Tests that we correctly handle the metadata service returning credentials. */
@Test
public void testSessionCredentials() throws Exception {
mockServer.setResponseFileName("sessionResponse");
mockServer.setAvailableSecurityCredentials("aws-dr-tools-test");
TestInstanceProfileCredentialsProvider credentialsProvider = new TestInstanceProfileCredentialsProvider();
assertNull(credentialsProvider.getLastInstanceProfileCheck());
AWSSessionCredentials credentials = (AWSSessionCredentials)credentialsProvider.getCredentials();
assertNotNull(credentialsProvider.getLastInstanceProfileCheck());
assertEquals("ACCESS_KEY_ID", credentials.getAWSAccessKeyId());
assertEquals("SECRET_ACCESS_KEY", credentials.getAWSSecretKey());
assertEquals("TOKEN_TOKEN_TOKEN", credentials.getSessionToken());
}
/**
* Tests that we correctly handle the metadata service returning credentials
* when multiple instance profiles are available.
*/
@Test
public void testSessionCredentials_MultipleInstanceProfiles() throws Exception {
mockServer.setResponseFileName("sessionResponse");
mockServer.setAvailableSecurityCredentials("test-credentials");
InstanceProfileCredentialsProvider credentialsProvider = new InstanceProfileCredentialsProvider();
AWSSessionCredentials credentials = (AWSSessionCredentials)credentialsProvider.getCredentials();
assertEquals("ACCESS_KEY_ID", credentials.getAWSAccessKeyId());
assertEquals("SECRET_ACCESS_KEY", credentials.getAWSSecretKey());
assertEquals("TOKEN_TOKEN_TOKEN", credentials.getSessionToken());
}
/**
* Tests that we correctly handle when no instance profiles are available
* through the metadata service.
*/
@Test
public void testNoInstanceProfiles() throws Exception {
mockServer.setResponseFileName("sessionResponse");
mockServer.setAvailableSecurityCredentials("");
InstanceProfileCredentialsProvider credentialsProvider = new InstanceProfileCredentialsProvider();
try {
credentialsProvider.getCredentials();
fail("Expected an AmazonClientException, but wasn't thrown");
} catch (AmazonClientException ace) {
assertNotNull(ace.getMessage());
}
}
/**
* Tests that we correctly handle when the metadata service credentials have
* expired.
*/
@Test
public void testSessionCredentials_Expired() throws Exception {
mockServer.setResponseFileName("sessionResponseExpired");
mockServer.setAvailableSecurityCredentials("test-credentials");
InstanceProfileCredentialsProvider credentialsProvider = new InstanceProfileCredentialsProvider();
try {
credentialsProvider.getCredentials();
fail("Expected an AmazonClientException, but wasn't thrown");
} catch (AmazonClientException ace) {
assertNotNull(ace.getMessage());
}
}
/**
* Tests how the metadata service credentials provider behaves when the
* metadata service isn't available (ex: the code isn't running on EC2, or
* IMDS is temporarily unresponsive).
*/
@Test
public void testNoMetadataService() throws Exception {
mockServer.stop();
TestInstanceProfileCredentialsProvider credentialsProvider = new TestInstanceProfileCredentialsProvider();
// When there are no credentials, the provider should throw an exception if we can't connect
assertNull(credentialsProvider.getCurrentCredentials());
try {
credentialsProvider.getCredentials();
fail("Expected an AmazonClientException, but wasn't thrown");
} catch (AmazonClientException ace) {
assertNotNull(ace.getMessage());
}
// When there are valid credentials (but need to be refreshed), the provider should NOT
// throw an exception when we can't connect
credentialsProvider.setCredentials(new BasicAWSCredentials("foo", "bar"));
credentialsProvider.setCredentialsExpiration(new Date(System.currentTimeMillis() + (ONE_MINUTE * 4)));
credentialsProvider.setLastInstanceProfileCheck(new Date(System.currentTimeMillis() - (ONE_MINUTE * 61)));
assertNotNull(credentialsProvider.getCredentials());
// When the credentials are expired, the provider should throw an exception when we can't connect
credentialsProvider.setCredentialsExpiration(new Date(System.currentTimeMillis() - (ONE_MINUTE * 4)));
credentialsProvider.setLastInstanceProfileCheck(new Date(System.currentTimeMillis() - (ONE_MINUTE * 61)));
try {
credentialsProvider.getCredentials();
fail("Expected an AmazonClientException, but wasn't thrown");
} catch (AmazonClientException ace) {
assertNotNull(ace.getMessage());
}
}
/**
* Tests by initiating a refresh thread in parallel which refreshes the
* credentials. Next call to credentials provider will result in refreshing
* and getting new credentials.
*/
@Test
public void testMultipleThreadsLoadingAndRefreshingCredentials()
throws Exception {
mockServer.setResponseFileName("sessionResponse");
mockServer.setAvailableSecurityCredentials("test-credentials");
InstanceProfileCredentialsProvider credentialsProvider = new InstanceProfileCredentialsProvider();
AWSSessionCredentials credentials = (AWSSessionCredentials) credentialsProvider
.getCredentials();
assertNotNull(credentials);
new RefreshThread(credentialsProvider).join();
AWSSessionCredentials newCredentials = (AWSSessionCredentials) credentialsProvider
.getCredentials();
assertNotNull(newCredentials);
assertNotSame(credentials, newCredentials);
}
private class RefreshThread extends Thread{
private InstanceProfileCredentialsProvider provider;
public RefreshThread(InstanceProfileCredentialsProvider provider){
this.provider = provider;
this.start();
}
@Override
public void run() {
this.provider.refresh();
}
}
}
| |
/*
* Copyright (c) 2015 Martin Pfeffer
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pepperonas.fxiconics.gmd;
import com.pepperonas.fxiconics.base.FxFontBase;
/**
* @author Martin Pfeffer (pepperonas)
*/
public class FxFontGoogleMaterial extends FxFontBase {
public enum Icons {
gmd_3d_rotation("\ue84d"),
gmd_access_alarm("\ue190"),
gmd_access_alarms("\ue191"),
gmd_access_time("\ue192"),
gmd_accessibility("\ue84e"),
gmd_account_balance("\ue84f"),
gmd_account_balance_wallet("\ue850"),
gmd_account_box("\ue851"),
gmd_account_circle("\ue853"),
gmd_adb("\ue60e"),
gmd_add("\ue145"),
gmd_add_alarm("\ue193"),
gmd_add_alert("\ue003"),
gmd_add_box("\ue146"),
gmd_add_circle("\ue147"),
gmd_add_circle_outline("\ue148"),
gmd_add_shopping_cart("\ue854"),
gmd_add_to_photos("\ue39d"),
gmd_adjust("\ue39e"),
gmd_airline_seat_flat("\ue630"),
gmd_airline_seat_flat_angled("\ue631"),
gmd_airline_seat_individual_suite("\ue632"),
gmd_airline_seat_legroom_extra("\ue633"),
gmd_airline_seat_legroom_normal("\ue634"),
gmd_airline_seat_legroom_reduced("\ue635"),
gmd_airline_seat_recline_extra("\ue636"),
gmd_airline_seat_recline_normal("\ue637"),
gmd_airplanemode_active("\ue195"),
gmd_airplanemode_inactive("\ue194"),
gmd_airplay("\ue055"),
gmd_alarm("\ue855"),
gmd_alarm_add("\ue856"),
gmd_alarm_off("\ue857"),
gmd_alarm_on("\ue858"),
gmd_album("\ue019"),
gmd_android("\ue859"),
gmd_announcement("\ue85a"),
gmd_apps("\ue5c3"),
gmd_archive("\ue149"),
gmd_arrow_back("\ue5c4"),
gmd_arrow_drop_down("\ue5c5"),
gmd_arrow_drop_down_circle("\ue5c6"),
gmd_arrow_drop_up("\ue5c7"),
gmd_arrow_forward("\ue5c8"),
gmd_aspect_ratio("\ue85b"),
gmd_assessment("\ue85c"),
gmd_assignment("\ue85d"),
gmd_assignment_ind("\ue85e"),
gmd_assignment_late("\ue85f"),
gmd_assignment_return("\ue860"),
gmd_assignment_returned("\ue861"),
gmd_assignment_turned_in("\ue862"),
gmd_assistant("\ue39f"),
gmd_assistant_photo("\ue3a0"),
gmd_attach_file("\ue226"),
gmd_attach_money("\ue227"),
gmd_attachment("\ue2bc"),
gmd_audiotrack("\ue3a1"),
gmd_autorenew("\ue863"),
gmd_av_timer("\ue01b"),
gmd_backspace("\ue14a"),
gmd_backup("\ue864"),
gmd_battery_alert("\ue19c"),
gmd_battery_charging_full("\ue1a3"),
gmd_battery_full("\ue1a4"),
gmd_battery_std("\ue1a5"),
gmd_battery_unknown("\ue1a6"),
gmd_beenhere("\ue52d"),
gmd_block("\ue14b"),
gmd_bluetooth("\ue1a7"),
gmd_bluetooth_audio("\ue60f"),
gmd_bluetooth_connected("\ue1a8"),
gmd_bluetooth_disabled("\ue1a9"),
gmd_bluetooth_searching("\ue1aa"),
gmd_blur_circular("\ue3a2"),
gmd_blur_linear("\ue3a3"),
gmd_blur_off("\ue3a4"),
gmd_blur_on("\ue3a5"),
gmd_book("\ue865"),
gmd_bookmark("\ue866"),
gmd_bookmark_border("\ue867"),
gmd_border_all("\ue228"),
gmd_border_bottom("\ue229"),
gmd_border_clear("\ue22a"),
gmd_border_color("\ue22b"),
gmd_border_horizontal("\ue22c"),
gmd_border_inner("\ue22d"),
gmd_border_left("\ue22e"),
gmd_border_outer("\ue22f"),
gmd_border_right("\ue230"),
gmd_border_style("\ue231"),
gmd_border_top("\ue232"),
gmd_border_vertical("\ue233"),
gmd_brightness_1("\ue3a6"),
gmd_brightness_2("\ue3a7"),
gmd_brightness_3("\ue3a8"),
gmd_brightness_4("\ue3a9"),
gmd_brightness_5("\ue3aa"),
gmd_brightness_6("\ue3ab"),
gmd_brightness_7("\ue3ac"),
gmd_brightness_auto("\ue1ab"),
gmd_brightness_high("\ue1ac"),
gmd_brightness_low("\ue1ad"),
gmd_brightness_medium("\ue1ae"),
gmd_broken_image("\ue3ad"),
gmd_brush("\ue3ae"),
gmd_bug_report("\ue868"),
gmd_build("\ue869"),
gmd_business("\ue0af"),
gmd_cached("\ue86a"),
gmd_cake("\ue7e9"),
gmd_call("\ue0b0"),
gmd_call_end("\ue0b1"),
gmd_call_made("\ue0b2"),
gmd_call_merge("\ue0b3"),
gmd_call_missed("\ue0b4"),
gmd_call_received("\ue0b5"),
gmd_call_split("\ue0b6"),
gmd_camera("\ue3af"),
gmd_camera_alt("\ue3b0"),
gmd_camera_enhance("\ue8fc"),
gmd_camera_front("\ue3b1"),
gmd_camera_rear("\ue3b2"),
gmd_camera_roll("\ue3b3"),
gmd_cancel("\ue5c9"),
gmd_card_giftcard("\ue8f6"),
gmd_card_membership("\ue8f7"),
gmd_card_travel("\ue8f8"),
gmd_cast("\ue307"),
gmd_cast_connected("\ue308"),
gmd_center_focus_strong("\ue3b4"),
gmd_center_focus_weak("\ue3b5"),
gmd_change_history("\ue86b"),
gmd_chat("\ue0b7"),
gmd_chat_bubble("\ue0ca"),
gmd_chat_bubble_outline("\ue0cb"),
gmd_check("\ue5ca"),
gmd_check_box("\ue834"),
gmd_check_box_outline_blank("\ue835"),
gmd_check_circle("\ue86c"),
gmd_chevron_left("\ue5cb"),
gmd_chevron_right("\ue5cc"),
gmd_chrome_reader_mode("\ue86d"),
gmd_class("\ue86e"),
gmd_clear("\ue14c"),
gmd_clear_all("\ue0b8"),
gmd_close("\ue5cd"),
gmd_closed_caption("\ue01c"),
gmd_cloud("\ue2bd"),
gmd_cloud_circle("\ue2be"),
gmd_cloud_done("\ue2bf"),
gmd_cloud_download("\ue2c0"),
gmd_cloud_off("\ue2c1"),
gmd_cloud_queue("\ue2c2"),
gmd_cloud_upload("\ue2c3"),
gmd_code("\ue86f"),
gmd_collections("\ue3b6"),
gmd_collections_bookmark("\ue431"),
gmd_color_lens("\ue3b7"),
gmd_colorize("\ue3b8"),
gmd_comment("\ue0b9"),
gmd_compare("\ue3b9"),
gmd_computer("\ue30a"),
gmd_confirmation_number("\ue638"),
gmd_contact_phone("\ue0cf"),
gmd_contacts("\ue0ba"),
gmd_content_copy("\ue14d"),
gmd_content_cut("\ue14e"),
gmd_content_paste("\ue14f"),
gmd_control_point("\ue3ba"),
gmd_control_point_duplicate("\ue3bb"),
gmd_create("\ue150"),
gmd_credit_card("\ue870"),
gmd_crop("\ue3be"),
gmd_crop_16_9("\ue3bc"),
gmd_crop_3_2("\ue3bd"),
gmd_crop_5_4("\ue3bf"),
gmd_crop_7_5("\ue3c0"),
gmd_crop_din("\ue3c1"),
gmd_crop_free("\ue3c2"),
gmd_crop_landscape("\ue3c3"),
gmd_crop_original("\ue3c4"),
gmd_crop_portrait("\ue3c5"),
gmd_crop_square("\ue3c6"),
gmd_dashboard("\ue871"),
gmd_data_usage("\ue1af"),
gmd_dehaze("\ue3c7"),
gmd_delete("\ue872"),
gmd_description("\ue873"),
gmd_desktop_mac("\ue30b"),
gmd_desktop_windows("\ue30c"),
gmd_details("\ue3c8"),
gmd_developer_board("\ue30d"),
gmd_developer_mode("\ue1b0"),
gmd_device_hub("\ue335"),
gmd_devices("\ue1b1"),
gmd_dialer_sip("\ue0bb"),
gmd_dialpad("\ue0bc"),
gmd_directions("\ue52e"),
gmd_directions_bike("\ue52f"),
gmd_directions_boat("\ue532"),
gmd_directions_bus("\ue530"),
gmd_directions_car("\ue531"),
gmd_directions_railway("\ue534"),
gmd_directions_run("\ue566"),
gmd_directions_subway("\ue533"),
gmd_directions_transit("\ue535"),
gmd_directions_walk("\ue536"),
gmd_disc_full("\ue610"),
gmd_dns("\ue875"),
gmd_do_not_disturb("\ue612"),
gmd_do_not_disturb_alt("\ue611"),
gmd_dock("\ue30e"),
gmd_domain("\ue7ee"),
gmd_done("\ue876"),
gmd_done_all("\ue877"),
gmd_drafts("\ue151"),
gmd_drive_eta("\ue613"),
gmd_dvr("\ue1b2"),
gmd_edit("\ue3c9"),
gmd_eject("\ue8fb"),
gmd_email("\ue0be"),
gmd_equalizer("\ue01d"),
gmd_error("\ue000"),
gmd_error_outline("\ue001"),
gmd_event("\ue878"),
gmd_event_available("\ue614"),
gmd_event_busy("\ue615"),
gmd_event_note("\ue616"),
gmd_event_seat("\ue903"),
gmd_exit_to_app("\ue879"),
gmd_expand_less("\ue5ce"),
gmd_expand_more("\ue5cf"),
gmd_explicit("\ue01e"),
gmd_explore("\ue87a"),
gmd_exposure("\ue3ca"),
gmd_exposure_neg_1("\ue3cb"),
gmd_exposure_neg_2("\ue3cc"),
gmd_exposure_plus_1("\ue3cd"),
gmd_exposure_plus_2("\ue3ce"),
gmd_exposure_zero("\ue3cf"),
gmd_extension("\ue87b"),
gmd_face("\ue87c"),
gmd_fast_forward("\ue01f"),
gmd_fast_rewind("\ue020"),
gmd_favorite("\ue87d"),
gmd_favorite_border("\ue87e"),
gmd_feedback("\ue87f"),
gmd_file_download("\ue2c4"),
gmd_file_upload("\ue2c6"),
gmd_filter("\ue3d3"),
gmd_filter_1("\ue3d0"),
gmd_filter_2("\ue3d1"),
gmd_filter_3("\ue3d2"),
gmd_filter_4("\ue3d4"),
gmd_filter_5("\ue3d5"),
gmd_filter_6("\ue3d6"),
gmd_filter_7("\ue3d7"),
gmd_filter_8("\ue3d8"),
gmd_filter_9("\ue3d9"),
gmd_filter_9_plus("\ue3da"),
gmd_filter_b_and_w("\ue3db"),
gmd_filter_center_focus("\ue3dc"),
gmd_filter_drama("\ue3dd"),
gmd_filter_frames("\ue3de"),
gmd_filter_hdr("\ue3df"),
gmd_filter_list("\ue152"),
gmd_filter_none("\ue3e0"),
gmd_filter_tilt_shift("\ue3e2"),
gmd_filter_vintage("\ue3e3"),
gmd_find_in_page("\ue880"),
gmd_find_replace("\ue881"),
gmd_flag("\ue153"),
gmd_flare("\ue3e4"),
gmd_flash_auto("\ue3e5"),
gmd_flash_off("\ue3e6"),
gmd_flash_on("\ue3e7"),
gmd_flight("\ue539"),
gmd_flight_land("\ue904"),
gmd_flight_takeoff("\ue905"),
gmd_flip("\ue3e8"),
gmd_flip_to_back("\ue882"),
gmd_flip_to_front("\ue883"),
gmd_folder("\ue2c7"),
gmd_folder_open("\ue2c8"),
gmd_folder_shared("\ue2c9"),
gmd_folder_special("\ue617"),
gmd_font_download("\ue167"),
gmd_format_align_center("\ue234"),
gmd_format_align_justify("\ue235"),
gmd_format_align_left("\ue236"),
gmd_format_align_right("\ue237"),
gmd_format_bold("\ue238"),
gmd_format_clear("\ue239"),
gmd_format_color_fill("\ue23a"),
gmd_format_color_reset("\ue23b"),
gmd_format_color_text("\ue23c"),
gmd_format_indent_decrease("\ue23d"),
gmd_format_indent_increase("\ue23e"),
gmd_format_italic("\ue23f"),
gmd_format_line_spacing("\ue240"),
gmd_format_list_bulleted("\ue241"),
gmd_format_list_numbered("\ue242"),
gmd_format_paint("\ue243"),
gmd_format_quote("\ue244"),
gmd_format_size("\ue245"),
gmd_format_strikethrough("\ue246"),
gmd_format_textdirection_l_to_r("\ue247"),
gmd_format_textdirection_r_to_l("\ue248"),
gmd_format_underlined("\ue249"),
gmd_forum("\ue0bf"),
gmd_forward("\ue154"),
gmd_forward_10("\ue056"),
gmd_forward_30("\ue057"),
gmd_forward_5("\ue058"),
gmd_fullscreen("\ue5d0"),
gmd_fullscreen_exit("\ue5d1"),
gmd_functions("\ue24a"),
gmd_gamepad("\ue30f"),
gmd_games("\ue021"),
gmd_gesture("\ue155"),
gmd_get_app("\ue884"),
gmd_gif("\ue908"),
gmd_gps_fixed("\ue1b3"),
gmd_gps_not_fixed("\ue1b4"),
gmd_gps_off("\ue1b5"),
gmd_grade("\ue885"),
gmd_gradient("\ue3e9"),
gmd_grain("\ue3ea"),
gmd_graphic_eq("\ue1b8"),
gmd_grid_off("\ue3eb"),
gmd_grid_on("\ue3ec"),
gmd_group("\ue7ef"),
gmd_group_add("\ue7f0"),
gmd_group_work("\ue886"),
gmd_hd("\ue052"),
gmd_hdr_off("\ue3ed"),
gmd_hdr_on("\ue3ee"),
gmd_hdr_strong("\ue3f1"),
gmd_hdr_weak("\ue3f2"),
gmd_headset("\ue310"),
gmd_headset_mic("\ue311"),
gmd_healing("\ue3f3"),
gmd_hearing("\ue023"),
gmd_help("\ue887"),
gmd_help_outline("\ue8fd"),
gmd_high_quality("\ue024"),
gmd_highlight_off("\ue888"),
gmd_history("\ue889"),
gmd_home("\ue88a"),
gmd_hotel("\ue53a"),
gmd_hourglass_empty("\ue88b"),
gmd_hourglass_full("\ue88c"),
gmd_http("\ue902"),
gmd_https("\ue88d"),
gmd_image("\ue3f4"),
gmd_image_aspect_ratio("\ue3f5"),
gmd_import_export("\ue0c3"),
gmd_inbox("\ue156"),
gmd_indeterminate_check_box("\ue909"),
gmd_info("\ue88e"),
gmd_info_outline("\ue88f"),
gmd_input("\ue890"),
gmd_insert_chart("\ue24b"),
gmd_insert_comment("\ue24c"),
gmd_insert_drive_file("\ue24d"),
gmd_insert_emoticon("\ue24e"),
gmd_insert_invitation("\ue24f"),
gmd_insert_link("\ue250"),
gmd_insert_photo("\ue251"),
gmd_invert_colors("\ue891"),
gmd_invert_colors_off("\ue0c4"),
gmd_iso("\ue3f6"),
gmd_keyboard("\ue312"),
gmd_keyboard_arrow_down("\ue313"),
gmd_keyboard_arrow_left("\ue314"),
gmd_keyboard_arrow_right("\ue315"),
gmd_keyboard_arrow_up("\ue316"),
gmd_keyboard_backspace("\ue317"),
gmd_keyboard_capslock("\ue318"),
gmd_keyboard_hide("\ue31a"),
gmd_keyboard_return("\ue31b"),
gmd_keyboard_tab("\ue31c"),
gmd_keyboard_voice("\ue31d"),
gmd_label("\ue892"),
gmd_label_outline("\ue893"),
gmd_landscape("\ue3f7"),
gmd_language("\ue894"),
gmd_laptop("\ue31e"),
gmd_laptop_chromebook("\ue31f"),
gmd_laptop_mac("\ue320"),
gmd_laptop_windows("\ue321"),
gmd_launch("\ue895"),
gmd_layers("\ue53b"),
gmd_layers_clear("\ue53c"),
gmd_leak_add("\ue3f8"),
gmd_leak_remove("\ue3f9"),
gmd_lens("\ue3fa"),
gmd_library_add("\ue02e"),
gmd_library_books("\ue02f"),
gmd_library_music("\ue030"),
gmd_link("\ue157"),
gmd_list("\ue896"),
gmd_live_help("\ue0c6"),
gmd_live_tv("\ue639"),
gmd_local_activity("\ue53f"),
gmd_local_airport("\ue53d"),
gmd_local_atm("\ue53e"),
gmd_local_bar("\ue540"),
gmd_local_cafe("\ue541"),
gmd_local_car_wash("\ue542"),
gmd_local_convenience_store("\ue543"),
gmd_local_dining("\ue556"),
gmd_local_drink("\ue544"),
gmd_local_florist("\ue545"),
gmd_local_gas_station("\ue546"),
gmd_local_grocery_store("\ue547"),
gmd_local_hospital("\ue548"),
gmd_local_hotel("\ue549"),
gmd_local_laundry_service("\ue54a"),
gmd_local_library("\ue54b"),
gmd_local_mall("\ue54c"),
gmd_local_movies("\ue54d"),
gmd_local_offer("\ue54e"),
gmd_local_parking("\ue54f"),
gmd_local_pharmacy("\ue550"),
gmd_local_phone("\ue551"),
gmd_local_pizza("\ue552"),
gmd_local_play("\ue553"),
gmd_local_post_office("\ue554"),
gmd_local_printshop("\ue555"),
gmd_local_see("\ue557"),
gmd_local_shipping("\ue558"),
gmd_local_taxi("\ue559"),
gmd_location_city("\ue7f1"),
gmd_location_disabled("\ue1b6"),
gmd_location_off("\ue0c7"),
gmd_location_on("\ue0c8"),
gmd_location_searching("\ue1b7"),
gmd_lock("\ue897"),
gmd_lock_open("\ue898"),
gmd_lock_outline("\ue899"),
gmd_looks("\ue3fc"),
gmd_looks_3("\ue3fb"),
gmd_looks_4("\ue3fd"),
gmd_looks_5("\ue3fe"),
gmd_looks_6("\ue3ff"),
gmd_looks_one("\ue400"),
gmd_looks_two("\ue401"),
gmd_loop("\ue028"),
gmd_loupe("\ue402"),
gmd_loyalty("\ue89a"),
gmd_mail("\ue158"),
gmd_map("\ue55b"),
gmd_markunread("\ue159"),
gmd_markunread_mailbox("\ue89b"),
gmd_memory("\ue322"),
gmd_menu("\ue5d2"),
gmd_merge_type("\ue252"),
gmd_message("\ue0c9"),
gmd_mic("\ue029"),
gmd_mic_none("\ue02a"),
gmd_mic_off("\ue02b"),
gmd_mms("\ue618"),
gmd_mode_comment("\ue253"),
gmd_mode_edit("\ue254"),
gmd_money_off("\ue25c"),
gmd_monochrome_photos("\ue403"),
gmd_mood("\ue7f2"),
gmd_mood_bad("\ue7f3"),
gmd_more("\ue619"),
gmd_more_horiz("\ue5d3"),
gmd_more_vert("\ue5d4"),
gmd_mouse("\ue323"),
gmd_movie("\ue02c"),
gmd_movie_creation("\ue404"),
gmd_music_note("\ue405"),
gmd_my_location("\ue55c"),
gmd_nature("\ue406"),
gmd_nature_people("\ue407"),
gmd_navigate_before("\ue408"),
gmd_navigate_next("\ue409"),
gmd_navigation("\ue55d"),
gmd_network_cell("\ue1b9"),
gmd_network_locked("\ue61a"),
gmd_network_wifi("\ue1ba"),
gmd_new_releases("\ue031"),
gmd_nfc("\ue1bb"),
gmd_no_sim("\ue0cc"),
gmd_not_interested("\ue033"),
gmd_note_add("\ue89c"),
gmd_notifications("\ue7f4"),
gmd_notifications_active("\ue7f7"),
gmd_notifications_none("\ue7f5"),
gmd_notifications_off("\ue7f6"),
gmd_notifications_paused("\ue7f8"),
gmd_offline_pin("\ue90a"),
gmd_ondemand_video("\ue63a"),
gmd_open_in_browser("\ue89d"),
gmd_open_in_new("\ue89e"),
gmd_open_with("\ue89f"),
gmd_pages("\ue7f9"),
gmd_pageview("\ue8a0"),
gmd_palette("\ue40a"),
gmd_panorama("\ue40b"),
gmd_panorama_fish_eye("\ue40c"),
gmd_panorama_horizontal("\ue40d"),
gmd_panorama_vertical("\ue40e"),
gmd_panorama_wide_angle("\ue40f"),
gmd_party_mode("\ue7fa"),
gmd_pause("\ue034"),
gmd_pause_circle_filled("\ue035"),
gmd_pause_circle_outline("\ue036"),
gmd_payment("\ue8a1"),
gmd_people("\ue7fb"),
gmd_people_outline("\ue7fc"),
gmd_perm_camera_mic("\ue8a2"),
gmd_perm_contact_calendar("\ue8a3"),
gmd_perm_data_setting("\ue8a4"),
gmd_perm_device_information("\ue8a5"),
gmd_perm_identity("\ue8a6"),
gmd_perm_media("\ue8a7"),
gmd_perm_phone_msg("\ue8a8"),
gmd_perm_scan_wifi("\ue8a9"),
gmd_person("\ue7fd"),
gmd_person_add("\ue7fe"),
gmd_person_outline("\ue7ff"),
gmd_person_pin("\ue55a"),
gmd_personal_video("\ue63b"),
gmd_phone("\ue0cd"),
gmd_phone_android("\ue324"),
gmd_phone_bluetooth_speaker("\ue61b"),
gmd_phone_forwarded("\ue61c"),
gmd_phone_in_talk("\ue61d"),
gmd_phone_iphone("\ue325"),
gmd_phone_locked("\ue61e"),
gmd_phone_missed("\ue61f"),
gmd_phone_paused("\ue620"),
gmd_phonelink("\ue326"),
gmd_phonelink_erase("\ue0db"),
gmd_phonelink_lock("\ue0dc"),
gmd_phonelink_off("\ue327"),
gmd_phonelink_ring("\ue0dd"),
gmd_phonelink_setup("\ue0de"),
gmd_photo("\ue410"),
gmd_photo_album("\ue411"),
gmd_photo_camera("\ue412"),
gmd_photo_library("\ue413"),
gmd_photo_size_select_actual("\ue432"),
gmd_photo_size_select_large("\ue433"),
gmd_photo_size_select_small("\ue434"),
gmd_picture_as_pdf("\ue415"),
gmd_picture_in_picture("\ue8aa"),
gmd_pin_drop("\ue55e"),
gmd_place("\ue55f"),
gmd_play_arrow("\ue037"),
gmd_play_circle_filled("\ue038"),
gmd_play_circle_outline("\ue039"),
gmd_play_for_work("\ue906"),
gmd_playlist_add("\ue03b"),
gmd_plus_one("\ue800"),
gmd_poll("\ue801"),
gmd_polymer("\ue8ab"),
gmd_portable_wifi_off("\ue0ce"),
gmd_portrait("\ue416"),
gmd_power("\ue63c"),
gmd_power_input("\ue336"),
gmd_power_settings_new("\ue8ac"),
gmd_present_to_all("\ue0df"),
gmd_print("\ue8ad"),
gmd_public("\ue80b"),
gmd_publish("\ue255"),
gmd_query_builder("\ue8ae"),
gmd_question_answer("\ue8af"),
gmd_queue("\ue03c"),
gmd_queue_music("\ue03d"),
gmd_radio("\ue03e"),
gmd_radio_button_checked("\ue837"),
gmd_radio_button_unchecked("\ue836"),
gmd_rate_review("\ue560"),
gmd_receipt("\ue8b0"),
gmd_recent_actors("\ue03f"),
gmd_redeem("\ue8b1"),
gmd_redo("\ue15a"),
gmd_refresh("\ue5d5"),
gmd_remove("\ue15b"),
gmd_remove_circle("\ue15c"),
gmd_remove_circle_outline("\ue15d"),
gmd_remove_red_eye("\ue417"),
gmd_reorder("\ue8fe"),
gmd_repeat("\ue040"),
gmd_repeat_one("\ue041"),
gmd_replay("\ue042"),
gmd_replay_10("\ue059"),
gmd_replay_30("\ue05a"),
gmd_replay_5("\ue05b"),
gmd_reply("\ue15e"),
gmd_reply_all("\ue15f"),
gmd_report("\ue160"),
gmd_report_problem("\ue8b2"),
gmd_restaurant_menu("\ue561"),
gmd_restore("\ue8b3"),
gmd_ring_volume("\ue0d1"),
gmd_room("\ue8b4"),
gmd_rotate_90_degrees_ccw("\ue418"),
gmd_rotate_left("\ue419"),
gmd_rotate_right("\ue41a"),
gmd_router("\ue328"),
gmd_satellite("\ue562"),
gmd_save("\ue161"),
gmd_scanner("\ue329"),
gmd_schedule("\ue8b5"),
gmd_school("\ue80c"),
gmd_screen_lock_landscape("\ue1be"),
gmd_screen_lock_portrait("\ue1bf"),
gmd_screen_lock_rotation("\ue1c0"),
gmd_screen_rotation("\ue1c1"),
gmd_sd_card("\ue623"),
gmd_sd_storage("\ue1c2"),
gmd_search("\ue8b6"),
gmd_security("\ue32a"),
gmd_select_all("\ue162"),
gmd_send("\ue163"),
gmd_settings("\ue8b8"),
gmd_settings_applications("\ue8b9"),
gmd_settings_backup_restore("\ue8ba"),
gmd_settings_bluetooth("\ue8bb"),
gmd_settings_brightness("\ue8bd"),
gmd_settings_cell("\ue8bc"),
gmd_settings_ethernet("\ue8be"),
gmd_settings_input_antenna("\ue8bf"),
gmd_settings_input_component("\ue8c0"),
gmd_settings_input_composite("\ue8c1"),
gmd_settings_input_hdmi("\ue8c2"),
gmd_settings_input_svideo("\ue8c3"),
gmd_settings_overscan("\ue8c4"),
gmd_settings_phone("\ue8c5"),
gmd_settings_power("\ue8c6"),
gmd_settings_remote("\ue8c7"),
gmd_settings_system_daydream("\ue1c3"),
gmd_settings_voice("\ue8c8"),
gmd_share("\ue80d"),
gmd_shop("\ue8c9"),
gmd_shop_two("\ue8ca"),
gmd_shopping_basket("\ue8cb"),
gmd_shopping_cart("\ue8cc"),
gmd_shuffle("\ue043"),
gmd_signal_cellular_4_bar("\ue1c8"),
gmd_signal_cellular_connected_no_internet_4_bar("\ue1cd"),
gmd_signal_cellular_no_sim("\ue1ce"),
gmd_signal_cellular_null("\ue1cf"),
gmd_signal_cellular_off("\ue1d0"),
gmd_signal_wifi_4_bar("\ue1d8"),
gmd_signal_wifi_4_bar_lock("\ue1d9"),
gmd_signal_wifi_off("\ue1da"),
gmd_sim_card("\ue32b"),
gmd_sim_card_alert("\ue624"),
gmd_skip_next("\ue044"),
gmd_skip_previous("\ue045"),
gmd_slideshow("\ue41b"),
gmd_smartphone("\ue32c"),
gmd_sms("\ue625"),
gmd_sms_failed("\ue626"),
gmd_snooze("\ue046"),
gmd_sort("\ue164"),
gmd_sort_by_alpha("\ue053"),
gmd_space_bar("\ue256"),
gmd_speaker("\ue32d"),
gmd_speaker_group("\ue32e"),
gmd_speaker_notes("\ue8cd"),
gmd_speaker_phone("\ue0d2"),
gmd_spellcheck("\ue8ce"),
gmd_star("\ue838"),
gmd_star_border("\ue83a"),
gmd_star_half("\ue839"),
gmd_stars("\ue8d0"),
gmd_stay_current_landscape("\ue0d3"),
gmd_stay_current_portrait("\ue0d4"),
gmd_stay_primary_landscape("\ue0d5"),
gmd_stay_primary_portrait("\ue0d6"),
gmd_stop("\ue047"),
gmd_storage("\ue1db"),
gmd_store("\ue8d1"),
gmd_store_mall_directory("\ue563"),
gmd_straighten("\ue41c"),
gmd_strikethrough_s("\ue257"),
gmd_style("\ue41d"),
gmd_subject("\ue8d2"),
gmd_subtitles("\ue048"),
gmd_supervisor_account("\ue8d3"),
gmd_surround_sound("\ue049"),
gmd_swap_calls("\ue0d7"),
gmd_swap_horiz("\ue8d4"),
gmd_swap_vert("\ue8d5"),
gmd_swap_vertical_circle("\ue8d6"),
gmd_switch_camera("\ue41e"),
gmd_switch_video("\ue41f"),
gmd_sync("\ue627"),
gmd_sync_disabled("\ue628"),
gmd_sync_problem("\ue629"),
gmd_system_update("\ue62a"),
gmd_system_update_alt("\ue8d7"),
gmd_tab("\ue8d8"),
gmd_tab_unselected("\ue8d9"),
gmd_tablet("\ue32f"),
gmd_tablet_android("\ue330"),
gmd_tablet_mac("\ue331"),
gmd_tag_faces("\ue420"),
gmd_tap_and_play("\ue62b"),
gmd_terrain("\ue564"),
gmd_text_format("\ue165"),
gmd_textsms("\ue0d8"),
gmd_texture("\ue421"),
gmd_theaters("\ue8da"),
gmd_thumb_down("\ue8db"),
gmd_thumb_up("\ue8dc"),
gmd_thumbs_up_down("\ue8dd"),
gmd_time_to_leave("\ue62c"),
gmd_timelapse("\ue422"),
gmd_timer("\ue425"),
gmd_timer_10("\ue423"),
gmd_timer_3("\ue424"),
gmd_timer_off("\ue426"),
gmd_toc("\ue8de"),
gmd_today("\ue8df"),
gmd_toll("\ue8e0"),
gmd_tonality("\ue427"),
gmd_toys("\ue332"),
gmd_track_changes("\ue8e1"),
gmd_traffic("\ue565"),
gmd_transform("\ue428"),
gmd_translate("\ue8e2"),
gmd_trending_down("\ue8e3"),
gmd_trending_flat("\ue8e4"),
gmd_trending_up("\ue8e5"),
gmd_tune("\ue429"),
gmd_turned_in("\ue8e6"),
gmd_turned_in_not("\ue8e7"),
gmd_tv("\ue333"),
gmd_undo("\ue166"),
gmd_unfold_less("\ue5d6"),
gmd_unfold_more("\ue5d7"),
gmd_usb("\ue1e0"),
gmd_verified_user("\ue8e8"),
gmd_vertical_align_bottom("\ue258"),
gmd_vertical_align_center("\ue259"),
gmd_vertical_align_top("\ue25a"),
gmd_vibration("\ue62d"),
gmd_video_library("\ue04a"),
gmd_videocam("\ue04b"),
gmd_videocam_off("\ue04c"),
gmd_view_agenda("\ue8e9"),
gmd_view_array("\ue8ea"),
gmd_view_carousel("\ue8eb"),
gmd_view_column("\ue8ec"),
gmd_view_comfy("\ue42a"),
gmd_view_compact("\ue42b"),
gmd_view_day("\ue8ed"),
gmd_view_headline("\ue8ee"),
gmd_view_list("\ue8ef"),
gmd_view_module("\ue8f0"),
gmd_view_quilt("\ue8f1"),
gmd_view_stream("\ue8f2"),
gmd_view_week("\ue8f3"),
gmd_vignette("\ue435"),
gmd_visibility("\ue8f4"),
gmd_visibility_off("\ue8f5"),
gmd_voice_chat("\ue62e"),
gmd_voicemail("\ue0d9"),
gmd_volume_down("\ue04d"),
gmd_volume_mute("\ue04e"),
gmd_volume_off("\ue04f"),
gmd_volume_up("\ue050"),
gmd_vpn_key("\ue0da"),
gmd_vpn_lock("\ue62f"),
gmd_wallpaper("\ue1bc"),
gmd_warning("\ue002"),
gmd_watch("\ue334"),
gmd_wb_auto("\ue42c"),
gmd_wb_cloudy("\ue42d"),
gmd_wb_incandescent("\ue42e"),
gmd_wb_iridescent("\ue436"),
gmd_wb_sunny("\ue430"),
gmd_wc("\ue63d"),
gmd_web("\ue051"),
gmd_whatshot("\ue80e"),
gmd_widgets("\ue1bd"),
gmd_wifi("\ue63e"),
gmd_wifi_lock("\ue1e1"),
gmd_wifi_tethering("\ue1e2"),
gmd_work("\ue8f9"),
gmd_wrap_text("\ue25b"),
gmd_youtube_searched_for("\ue8fa"),
gmd_zoom_in("\ue8ff"),
gmd_zoom_out("\ue900");
/** Private-use-area code point for this icon, stored as a one-character string. */
private final String id;
// Each constant is constructed with its icon-font code point (e.g. "\ue84d").
Icons(String s) {
this.id = s;
}
/**
 * Returns the enum constant's identifier (e.g. {@code "gmd_add"}).
 */
public String getName() {
return name();
}
/**
 * Returns the icon's code-point string, so rendering the constant as text
 * with the icon font produces the glyph.
 */
@Override
public String toString() {
return id;
}
}
}
| |
package us.misterwok.app.activity;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Intent;
import android.content.SharedPreferences;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBar;
import android.text.TextUtils;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import com.facebook.Request;
import com.facebook.Response;
import com.facebook.Session;
import com.facebook.SessionState;
import com.facebook.model.GraphUser;
import com.google.gson.Gson;
import com.loopj.android.http.JsonHttpResponseHandler;
import com.loopj.android.http.RequestParams;
import org.apache.http.Header;
import org.json.JSONObject;
import java.util.ArrayList;
import us.misterwok.app.Application;
import us.misterwok.app.BuildVariants;
import us.misterwok.app.Constants;
import us.misterwok.app.R;
import us.misterwok.app.api.APIEngine;
import us.misterwok.app.api.obj.LoginObj;
import us.misterwok.app.fragment.CategoryListFragment;
import us.misterwok.app.fragment.NavigationDrawerFragment;
import us.misterwok.app.obj.LeftMenuItem;
import us.misterwok.app.utils.GooglePlayServiceHelper;
public class MainActivity extends BaseActivity
        implements NavigationDrawerFragment.NavigationDrawerCallbacks {

    // Positions of the drawer entries; must match the order built in initDrawerItems().
    public static final int INDEX_MENU = 0;
    public static final int INDEX_CART = 1;
    public static final int INDEX_ABOUT = 2;
    public static final int INDEX_USER = 3;
    public static final int INDEX_ORDER = 4;

    private NavigationDrawerFragment mNavigationDrawerFragment;
    // Last section title; restored into the action bar when the drawer closes.
    private CharSequence mTitle;
    private ArrayList<LeftMenuItem> leftMenuItems;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mTitle = getTitle();
        mNavigationDrawerFragment = (NavigationDrawerFragment)
                getSupportFragmentManager().findFragmentById(R.id.navigation_drawer);
        mNavigationDrawerFragment.setUp(
                R.id.navigation_drawer,
                (DrawerLayout) findViewById(R.id.drawer_layout));
        initDrawerItems();
        GooglePlayServiceHelper googlePlayServiceHelper = new GooglePlayServiceHelper(this);
        if (googlePlayServiceHelper.checkPlayServices()) {
            googlePlayServiceHelper.init();
        }
        Application.notificationId = 0;
    }

    /**
     * Dispatches a drawer selection to the matching screen. {@link #INDEX_USER}
     * toggles between Facebook login and logout depending on whether a user
     * name is already stored in the preferences.
     */
    @Override
    public void onNavigationDrawerItemSelected(int position) {
        Intent intent;
        switch (position) {
            case INDEX_MENU:
                getSupportFragmentManager().beginTransaction()
                        .replace(R.id.container,
                                CategoryListFragment.newInstance(),
                                CategoryListFragment.class.getCanonicalName())
                        .commit();
                break;
            case INDEX_CART:
                intent = new Intent(MainActivity.this, CartActivity.class);
                startActivity(intent);
                break;
            case INDEX_USER:
                SharedPreferences sharedPreferences = getSharedPreferences(getPackageName(), Activity.MODE_PRIVATE);
                String name = sharedPreferences.getString(Constants.PREFERENCE_NAME, null);
                if (TextUtils.isEmpty(name)) {
                    onFacebookLogin();
                } else {
                    onFacebookLogout();
                }
                break;
            case INDEX_ABOUT:
                intent = new Intent(MainActivity.this, AboutActivity.class);
                startActivity(intent);
                break;
            case INDEX_ORDER:
                // Admin-only entry: route to the admin login first if no admin
                // email has been stored yet.
                sharedPreferences = getSharedPreferences(getPackageName(), Activity.MODE_PRIVATE);
                String email = sharedPreferences.getString(Constants.PREFERENCE_ADMIN_EMAIL, null);
                if (TextUtils.isEmpty(email)) {
                    intent = new Intent(MainActivity.this, AdminLoginActivity.class);
                } else {
                    intent = new Intent(MainActivity.this, OrderActivity.class);
                }
                startActivity(intent);
                break;
            default:
                break;
        }
    }

    public void onSectionAttached(int titleId) {
        mTitle = getString(titleId);
    }

    public void restoreActionBar() {
        ActionBar actionBar = getSupportActionBar();
        // getSupportActionBar() can return null when no action bar is present.
        if (actionBar != null) {
            actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
            actionBar.setDisplayShowTitleEnabled(true);
            actionBar.setTitle(mTitle);
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        if (!mNavigationDrawerFragment.isDrawerOpen()) {
            // Only show the global actions when the drawer is closed.
            getMenuInflater().inflate(R.menu.main, menu);
            restoreActionBar();
            return true;
        }
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.action_about:
                onNavigationDrawerItemSelected(INDEX_ABOUT);
                break;
            case R.id.action_call:
                // NOTE(review): ACTION_CALL requires the CALL_PHONE permission;
                // confirm it is declared in the manifest (ACTION_DIAL would not need it).
                Intent intent = new Intent(Intent.ACTION_CALL, Uri.parse("tel:" + getString(R.string.store_phone_number)));
                startActivity(intent);
                break;
        }
        return super.onOptionsItemSelected(item);
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        // Fix: there is no active session until a Facebook login has been
        // started, so guard against a NullPointerException here.
        Session session = Session.getActiveSession();
        if (session != null) {
            session.onActivityResult(this, requestCode, resultCode, data);
        }
    }

    public void onCartButtonClick(View view) {
        SharedPreferences sharedPreferences = getSharedPreferences(getPackageName(), MODE_PRIVATE);
        String name = sharedPreferences.getString(Constants.PREFERENCE_NAME, null);
        if (TextUtils.isEmpty(name)) {
            onFacebookLogin();
        } else {
            // Use the named constant instead of the magic number 1.
            onNavigationDrawerItemSelected(INDEX_CART);
        }
    }

    /**
     * (Re)builds the drawer item list. The fourth entry flips between
     * "login" and "logout" based on the stored user name, so this is called
     * again after every login/logout.
     */
    private void initDrawerItems() {
        leftMenuItems = new ArrayList<LeftMenuItem>();
        leftMenuItems.add(new LeftMenuItem(R.drawable.ic_action_star, getString(R.string.title_menu)));
        leftMenuItems.add(new LeftMenuItem(R.drawable.ic_action_cart, getString(R.string.title_cart)));
        leftMenuItems.add(new LeftMenuItem(R.drawable.ic_action_about, getString(R.string.title_about)));
        SharedPreferences sharedPreferences = getSharedPreferences(getPackageName(), Activity.MODE_PRIVATE);
        String name = sharedPreferences.getString(Constants.PREFERENCE_NAME, null);
        if (TextUtils.isEmpty(name)) {
            leftMenuItems.add(new LeftMenuItem(R.drawable.ic_action_user, getString(R.string.title_login)));
        } else {
            leftMenuItems.add(new LeftMenuItem(R.drawable.ic_action_back, getString(R.string.title_logout)));
        }
        if (BuildVariants.IS_ADMIN) {
            leftMenuItems.add(new LeftMenuItem(R.drawable.ic_action_star, getString(R.string.title_order)));
        }
        mNavigationDrawerFragment.setLeftMenuItems(leftMenuItems);
    }

    /**
     * Opens a Facebook session, fetches the user profile, registers the user
     * with the backend and stores the resulting credentials. Shows a progress
     * dialog for the whole round trip.
     */
    private void onFacebookLogin() {
        final ProgressDialog progressDialog = ProgressDialog.show(MainActivity.this,
                getString(R.string.dialog_create_user_title),
                getString(R.string.dialog_create_user_message));
        Session.openActiveSession(this, true, new Session.StatusCallback() {
            @Override
            public void call(Session session, SessionState state,
                             Exception exception) {
                if (session.isOpened()) {
                    Request.newMeRequest(session, new Request.GraphUserCallback() {
                        @Override
                        public void onCompleted(final GraphUser user, Response response) {
                            if (user != null) {
                                SharedPreferences sharedPreferences = getSharedPreferences(
                                        getPackageName(), Activity.MODE_PRIVATE);
                                String gcmId = sharedPreferences.getString(Constants.PREFERENCE_GCM_REGISTRATION, "");
                                RequestParams requestParams = new RequestParams();
                                requestParams.put("api_key", "android");
                                requestParams.put("facebook_name", user.getName());
                                requestParams.put("facebook_id", user.getId());
                                requestParams.put("gcm", gcmId);
                                APIEngine.loginUser(requestParams, new JsonHttpResponseHandler() {
                                    @Override
                                    public void onSuccess(int statusCode, Header[] headers, String responseBody) {
                                        LoginObj loginObj = new Gson().fromJson(responseBody, LoginObj.class);
                                        progressDialog.dismiss();
                                        SharedPreferences sharedPreferences = getSharedPreferences(getPackageName(), MODE_PRIVATE);
                                        SharedPreferences.Editor editor = sharedPreferences.edit();
                                        // Fix: store the user's name under the name key;
                                        // the id was being stored here by mistake (the id
                                        // already goes into PREFERENCE_FACEBOOK_ID below).
                                        editor.putString(Constants.PREFERENCE_NAME, user.getName());
                                        editor.putString(Constants.PREFERENCE_FACEBOOK_ID, user.getId());
                                        editor.putString(Constants.PREFERENCE_API_KEY, loginObj.data);
                                        // apply() persists asynchronously but the in-memory
                                        // values are visible immediately to initDrawerItems().
                                        editor.apply();
                                        initDrawerItems();
                                        onNavigationDrawerItemSelected(INDEX_CART);
                                    }

                                    @Override
                                    public void onFailure(Throwable e, JSONObject errorResponse) {
                                        progressDialog.dismiss();
                                        super.onFailure(e, errorResponse);
                                    }
                                });
                            } else {
                                // Fix: no profile returned — don't leave the
                                // progress dialog spinning forever.
                                progressDialog.dismiss();
                            }
                        }
                    }).executeAsync();
                } else {
                    progressDialog.dismiss();
                }
            }
        });
    }

    /**
     * Closes/clears the Facebook session, wipes the stored credentials and
     * rebuilds the drawer so the entry reads "login" again.
     */
    private void onFacebookLogout() {
        Session session = Session.getActiveSession();
        if (session != null) {
            if (!session.isClosed()) {
                session.closeAndClearTokenInformation();
            }
        } else {
            session = new Session(MainActivity.this);
            Session.setActiveSession(session);
            session.closeAndClearTokenInformation();
        }
        SharedPreferences sharedPreferences = getSharedPreferences(getPackageName(), MODE_PRIVATE);
        SharedPreferences.Editor editor = sharedPreferences.edit();
        editor.putString(Constants.PREFERENCE_NAME, null);
        editor.putString(Constants.PREFERENCE_FACEBOOK_ID, null);
        editor.putString(Constants.PREFERENCE_API_KEY, null);
        editor.apply();
        initDrawerItems();
    }
}
| |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License, version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.netty.handler.codec.http2;
import static io.netty.handler.codec.http2.Http2Error.PROTOCOL_ERROR;
import static io.netty.handler.codec.http2.Http2Exception.connectionError;
import static io.netty.util.AsciiString.CASE_SENSITIVE_HASHER;
import static io.netty.util.AsciiString.isUpperCase;
import io.netty.handler.codec.CharSequenceValueConverter;
import io.netty.handler.codec.DefaultHeaders;
import io.netty.util.AsciiString;
import io.netty.util.ByteProcessor;
import io.netty.util.internal.PlatformDependent;
public class DefaultHttp2Headers
        extends DefaultHeaders<CharSequence, CharSequence, Http2Headers> implements Http2Headers {
    // Stops iteration (returns false) at the first upper-case byte so that
    // AsciiString.forEachByte reports its index; -1 means all lower-case.
    private static final ByteProcessor HTTP2_NAME_VALIDATOR_PROCESSOR = new ByteProcessor() {
        @Override
        public boolean process(byte value) throws Exception {
            return !isUpperCase(value);
        }
    };
    // HTTP/2 header field names must be lower-case; any upper-case character
    // is rejected with a connection-level PROTOCOL_ERROR.
    private static final NameValidator<CharSequence> HTTP2_NAME_VALIDATOR = new NameValidator<CharSequence>() {
        @Override
        public void validateName(CharSequence name) {
            if (name instanceof AsciiString) {
                // Fast path: walk the backing bytes directly.
                final int index;
                try {
                    index = ((AsciiString) name).forEachByte(HTTP2_NAME_VALIDATOR_PROCESSOR);
                } catch (Http2Exception e) {
                    PlatformDependent.throwException(e);
                    return;
                } catch (Throwable t) {
                    // Anything unexpected from the byte walk is wrapped as a
                    // connection error, preserving the original cause.
                    PlatformDependent.throwException(connectionError(PROTOCOL_ERROR, t,
                            "unexpected error. invalid header name [%s]", name));
                    return;
                }
                if (index != -1) {
                    // forEachByte found an upper-case byte at this index.
                    PlatformDependent.throwException(connectionError(PROTOCOL_ERROR,
                            "invalid header name [%s]", name));
                }
            } else {
                // Fallback for generic CharSequences: scan char by char.
                for (int i = 0; i < name.length(); ++i) {
                    if (isUpperCase(name.charAt(i))) {
                        PlatformDependent.throwException(connectionError(PROTOCOL_ERROR,
                                "invalid header name [%s]", name));
                    }
                }
            }
        }
    };
    // First entry in iteration order that is NOT a pseudo-header (":name").
    // New pseudo-headers are inserted in front of this marker so that all
    // pseudo-headers iterate before regular headers, as HTTP/2 requires.
    private HeaderEntry<CharSequence, CharSequence> firstNonPseudo = head;
    /**
     * Create a new instance.
     * <p>
     * Header names will be validated according to
     * <a href="https://tools.ietf.org/html/rfc7540">rfc7540</a>.
     */
    public DefaultHttp2Headers() {
        this(true);
    }
    /**
     * Create a new instance.
     * @param validate {@code true} to validate header names according to
     * <a href="https://tools.ietf.org/html/rfc7540">rfc7540</a>. {@code false} to not validate header names.
     */
    @SuppressWarnings("unchecked")
    public DefaultHttp2Headers(boolean validate) {
        // Case sensitive compare is used because it is cheaper, and header validation can be used to catch invalid
        // headers.
        super(CASE_SENSITIVE_HASHER,
              CharSequenceValueConverter.INSTANCE,
              validate ? HTTP2_NAME_VALIDATOR : NameValidator.NOT_NULL);
    }
    /**
     * Create a new instance.
     * @param validate {@code true} to validate header names according to
     * <a href="https://tools.ietf.org/html/rfc7540">rfc7540</a>. {@code false} to not validate header names.
     * @param arraySizeHint A hint as to how large the hash data structure should be.
     * The next positive power of two will be used. An upper bound may be enforced.
     */
    @SuppressWarnings("unchecked")
    public DefaultHttp2Headers(boolean validate, int arraySizeHint) {
        // Case sensitive compare is used because it is cheaper, and header validation can be used to catch invalid
        // headers.
        super(CASE_SENSITIVE_HASHER,
              CharSequenceValueConverter.INSTANCE,
              validate ? HTTP2_NAME_VALIDATOR : NameValidator.NOT_NULL,
              arraySizeHint);
    }
    @Override
    public Http2Headers clear() {
        // Reset the pseudo-header marker; all entries are being removed.
        this.firstNonPseudo = head;
        return super.clear();
    }
    @Override
    public boolean equals(Object o) {
        if (!(o instanceof Http2Headers)) {
            return false;
        }
        // Values are compared case-sensitively, mirroring hashCode() below.
        return equals((Http2Headers) o, CASE_SENSITIVE_HASHER);
    }
    @Override
    public int hashCode() {
        return hashCode(CASE_SENSITIVE_HASHER);
    }
    @Override
    public Http2Headers method(CharSequence value) {
        set(PseudoHeaderName.METHOD.value(), value);
        return this;
    }
    @Override
    public Http2Headers scheme(CharSequence value) {
        set(PseudoHeaderName.SCHEME.value(), value);
        return this;
    }
    @Override
    public Http2Headers authority(CharSequence value) {
        set(PseudoHeaderName.AUTHORITY.value(), value);
        return this;
    }
    @Override
    public Http2Headers path(CharSequence value) {
        set(PseudoHeaderName.PATH.value(), value);
        return this;
    }
    @Override
    public Http2Headers status(CharSequence value) {
        set(PseudoHeaderName.STATUS.value(), value);
        return this;
    }
    @Override
    public CharSequence method() {
        return get(PseudoHeaderName.METHOD.value());
    }
    @Override
    public CharSequence scheme() {
        return get(PseudoHeaderName.SCHEME.value());
    }
    @Override
    public CharSequence authority() {
        return get(PseudoHeaderName.AUTHORITY.value());
    }
    @Override
    public CharSequence path() {
        return get(PseudoHeaderName.PATH.value());
    }
    @Override
    public CharSequence status() {
        return get(PseudoHeaderName.STATUS.value());
    }
    @Override
    protected final HeaderEntry<CharSequence, CharSequence> newHeaderEntry(int h, CharSequence name, CharSequence value,
                                           HeaderEntry<CharSequence, CharSequence> next) {
        return new Http2HeaderEntry(h, name, value, next);
    }
    // Entry subclass that keeps pseudo-headers ahead of regular headers in the
    // doubly-linked iteration order maintained by DefaultHeaders.
    private final class Http2HeaderEntry extends HeaderEntry<CharSequence, CharSequence> {
        protected Http2HeaderEntry(int hash, CharSequence key, CharSequence value,
                                   HeaderEntry<CharSequence, CharSequence> next) {
            super(hash, key);
            this.value = value;
            this.next = next;
            // Make sure the pseudo headers fields are first in iteration order
            if (key.length() != 0 && key.charAt(0) == ':') {
                // Splice in just before the first non-pseudo entry.
                after = firstNonPseudo;
                before = firstNonPseudo.before();
            } else {
                // Regular header: append at the tail (just before head).
                after = head;
                before = head.before();
                if (firstNonPseudo == head) {
                    // This is the first non-pseudo entry; remember it as the marker.
                    firstNonPseudo = this;
                }
            }
            pointNeighborsToThis();
        }
        @Override
        protected void remove() {
            // Keep the marker valid when the first non-pseudo entry is removed.
            if (this == firstNonPseudo) {
                firstNonPseudo = firstNonPseudo.after();
            }
            super.remove();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.uima.cas.test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import org.apache.uima.cas.ArrayFS;
import org.apache.uima.cas.CAS;
import org.apache.uima.cas.CASRuntimeException;
import org.apache.uima.cas.Feature;
import org.apache.uima.cas.FeatureStructure;
import org.apache.uima.cas.Type;
import org.apache.uima.cas.impl.CASImpl;
import org.apache.uima.cas.impl.FeatureImpl;
import org.apache.uima.cas.impl.LowLevelCAS;
import org.apache.uima.cas.impl.LowLevelTypeSystem;
import org.apache.uima.cas.impl.TypeImpl;
import org.apache.uima.cas.impl.TypeSystemConstants;
import org.apache.uima.cas.impl.TypeSystemImpl;
import org.apache.uima.cas.text.AnnotationFS;
import org.apache.uima.jcas.cas.FSArray;
import org.apache.uima.jcas.cas.NonEmptyFSList;
import org.apache.uima.jcas.cas.Sofa;
import org.apache.uima.jcas.cas.TOP;
import org.apache.uima.jcas.tcas.Annotation;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
* Class comment for FeatureStructureTest.java goes here.
*
*/
public class FeatureStructureTest {
  // CAS under test and its type system, both created fresh in setUp().
  private CASImpl cas;
  private TypeSystemImpl ts;
  // Types resolved from the test type system in setUp().
  private Type topType;
  private Type stringType;
  private TypeImpl tokenType;
  private Type intType;
  private TypeImpl tokenTypeType;
  private Type wordType;
  private Type arrayFsWithSubtypeType;
  private Feature arrayFsWithSubtypeTypeFeat;
  private Type group1Type;
  private Type group2Type;
  private Type langPairType;
  private Type neListType;
  // Features of the language-pair type (two language strings + description).
  private Feature lang1;
  private Feature lang2;
  private Feature descr;
  // Features used by the token-centric tests; sentLenFeat belongs to a
  // different type and is used to provoke INAPPROP_FEAT errors.
  private Feature tokenTypeFeat;
  private Feature lemmaFeat;
  private Feature sentLenFeat;
  private Feature tokenFloatFeat;
  private Feature tokenLongFeat;
  private Feature tokenDoubleFeat;
  // Built-in annotation "begin" feature.
  private Feature startFeature;
  // Tail/head features of the non-empty FS list type.
  private Feature tlFeature;
  private Feature hdFeature;
@BeforeEach
public void setUp() {
try {
this.cas = (CASImpl) CASInitializer.initCas(new CASTestSetup(), null);
assertTrue(this.cas != null);
this.ts = (TypeSystemImpl) this.cas.getTypeSystem();
assertTrue(this.ts != null);
} catch (Exception e) {
e.printStackTrace();
assertTrue(false);
}
this.topType = this.ts.getType(CAS.TYPE_NAME_TOP);
assertTrue(this.topType != null);
this.stringType = this.ts.getType(CAS.TYPE_NAME_STRING);
assertTrue(this.stringType != null);
this.tokenType = this.ts.getType(CASTestSetup.TOKEN_TYPE);
assertTrue(this.stringType != null);
this.intType = this.ts.getType(CAS.TYPE_NAME_INTEGER);
assertTrue(this.intType != null);
this.tokenTypeType = this.ts.getType(CASTestSetup.TOKEN_TYPE_TYPE);
assertTrue(this.tokenTypeType != null);
this.wordType = this.ts.getType(CASTestSetup.WORD_TYPE);
assertTrue(this.wordType != null);
this.arrayFsWithSubtypeType = this.ts.getType(CASTestSetup.ARRAYFSWITHSUBTYPE_TYPE);
assertTrue(this.arrayFsWithSubtypeType != null);
this.arrayFsWithSubtypeTypeFeat = this.ts
.getFeatureByFullName(CASTestSetup.ARRAYFSWITHSUBTYPE_TYPE_FEAT_Q);
this.group1Type = this.ts.getType(CASTestSetup.GROUP_1);
assertTrue(this.group1Type != null);
this.group2Type = this.ts.getType(CASTestSetup.GROUP_2);
assertTrue(this.group2Type != null);
this.tokenTypeFeat = this.ts.getFeatureByFullName(CASTestSetup.TOKEN_TYPE_FEAT_Q);
assertTrue(this.tokenTypeFeat != null);
this.lemmaFeat = this.ts.getFeatureByFullName(CASTestSetup.LEMMA_FEAT_Q);
assertTrue(this.lemmaFeat != null);
this.sentLenFeat = this.ts.getFeatureByFullName(CASTestSetup.SENT_LEN_FEAT_Q);
assertTrue(this.sentLenFeat != null);
this.tokenFloatFeat = this.ts.getFeatureByFullName(CASTestSetup.TOKEN_FLOAT_FEAT_Q);
assertTrue(this.tokenFloatFeat != null);
this.tokenDoubleFeat = this.ts.getFeatureByFullName(CASTestSetup.TOKEN_DOUBLE_FEAT_Q);
assertTrue(this.tokenDoubleFeat != null);
this.tokenLongFeat = this.ts.getFeatureByFullName(CASTestSetup.TOKEN_LONG_FEAT_Q);
assertTrue(this.tokenLongFeat != null);
this.startFeature = this.ts.getFeatureByFullName(CAS.FEATURE_FULL_NAME_BEGIN);
assertTrue(this.startFeature != null);
this.langPairType = this.ts.getType(CASTestSetup.LANG_PAIR);
assertTrue(this.langPairType != null);
this.lang1 = this.langPairType.getFeatureByBaseName(CASTestSetup.LANG1);
assertTrue(this.lang1 != null);
this.lang2 = this.langPairType.getFeatureByBaseName(CASTestSetup.LANG2);
assertTrue(this.lang2 != null);
this.descr = this.langPairType.getFeatureByBaseName(CASTestSetup.DESCR_FEAT);
assertTrue(this.descr != null);
this.neListType = this.ts.getType(CAS.TYPE_NAME_NON_EMPTY_FS_LIST);
assertTrue(this.neListType != null);
this.tlFeature = this.neListType.getFeatureByBaseName(CAS.FEATURE_BASE_NAME_TAIL);
assertTrue(this.tlFeature != null);
this.hdFeature = this.neListType.getFeatureByBaseName(CAS.FEATURE_BASE_NAME_HEAD);
assertTrue(this.hdFeature != null);
}
@AfterEach
public void tearDown() {
this.cas = null;
this.ts = null;
this.topType = null;
this.stringType = null;
this.tokenType = null;
this.intType = null;
this.tokenTypeType = null;
this.wordType = null;
this.group1Type = null;
this.group2Type = null;
this.tokenTypeFeat = null;
this.lemmaFeat = null;
this.sentLenFeat = null;
this.tokenFloatFeat = null;
this.startFeature = null;
this.langPairType = null;
this.lang1 = null;
this.lang2 = null;
this.descr = null;
}
@Test
public void testErrorDerefDifferentCAS() {
CAS cas2 = CASInitializer.initCas(new CASTestSetup(), null);
Type tokenType1 = this.ts.getType(CASTestSetup.TOKEN_TYPE);
Feature tokenTypeFeature = this.ts
.getFeatureByFullName(CASTestSetup.TOKEN_TYPE + ":" + CASTestSetup.TOKEN_TYPE_FEAT);
FeatureStructure fs1 = cas2.createFS(tokenType1);
FeatureStructure fs = cas.createFS(tokenType1);
boolean caught = false;
try {
fs.setFeatureValue(tokenTypeFeature, fs1);
} catch (Exception e) {
assertTrue(e instanceof CASRuntimeException);
caught = true;
}
assertTrue(caught);
}
@Test
public void testGetType() {
Type tokenType1 = this.ts.getType(CASTestSetup.TOKEN_TYPE);
Type wordType1 = this.ts.getType(CASTestSetup.WORD_TYPE);
FeatureStructure word = this.cas.createFS(wordType1);
FeatureStructure token = this.cas.createFS(tokenType1);
assertTrue(word.getType().equals(wordType1));
assertTrue(token.getType().equals(tokenType1));
}
  /**
   * This test tests V2 backwards compatibility The goal is to match what V2 did for low level cas
   * access The area this is testing is the use of the LL int operations to change the type of an
   * existing feature structure.
   */
  @Test
  public void testLLsetType() {
    LowLevelCAS llc = cas.getLowLevelCAS();
    FSArray fsa = new FSArray(ts.getType(CAS.TYPE_NAME_FS_ARRAY), cas, 3);
    fsa.addToIndexes(); // otherwise won't be replaced later
    NonEmptyFSList fsl = new NonEmptyFSList(ts.getType(CAS.TYPE_NAME_NON_EMPTY_FS_LIST), cas);
    fsl.addToIndexes(); // otherwise won't be replaced later
    Annotation token = this.cas.createFS(tokenType);
    cas.setId2FSsMaybeUnconditionally(token);
    // set up some refs; these must be updated if the type changes in a way to require a new FS
    fsa.set(0, token); // set the 0th element of a FS Array to point to the "token"
    fsl.setHead(token); // set the head element of a FS List to point to the "token"
    int tokId = token._id();
    // set some feature values; some of these are copied (if there's room, etc.)
    TOP ttfv = cas.createFS(tokenTypeType);
    token.setFeatureValue(tokenTypeFeat, ttfv);
    token.setFloatValue(tokenFloatFeat, 1.1f);
    Assertions.assertThat(token.getFloatValue(tokenFloatFeat)).isEqualTo(1.1f);
    token.setDoubleValue(tokenDoubleFeat, 1.7d);
    Assertions.assertThat(token.getDoubleValue(tokenDoubleFeat)).isEqualTo(1.7d);
    token.setBegin(3);
    token.setEnd(5);
    Sofa sofa = (Sofa) token.getSofa();
    assertTrue(sofa != null);
    assertTrue(fsa.get(0) == token);
    assertTrue(fsl.getHead() == token);
    // change the type to just Annotation
    // because this is a supertype, it should not create a new FS
    llc.ll_setIntValue(tokId, 0, TypeSystemConstants.annotTypeCode);
    Annotation fs = cas.getFsFromId(tokId);
    // Same FS object, same id; begin/end/sofa survive the supertype switch.
    assertTrue(fs == token);
    assertTrue(fs._id() == token._id());
    assertEquals(ts.annotType, fs._getTypeImpl());
    assertEquals(fs.getBegin(), 3);
    assertEquals(fs.getEnd(), 5);
    assertEquals(sofa, fs.getSofa());
    assertTrue(fsa.get(0) == fs);
    assertTrue(fsl.getHead() == fs);
    // Change Annotation back to Token type
    llc.ll_setIntValue(tokId, 0, tokenType.getCode());
    token = cas.getFsFromId(tokId);
    assertTrue(fs == token);
    assertTrue(fs._id() == token._id());
    assertEquals(fs.getBegin(), 3);
    assertEquals(fs.getEnd(), 5);
    assertEquals(sofa, fs.getSofa());
    // Token-only features are still intact after the round trip.
    Assertions.assertThat(token.getFloatValue(tokenFloatFeat)).isEqualTo(1.1f);
    assertEquals(ttfv, token.getFeatureValue(tokenTypeFeat));
    assertTrue(fsa.get(0) == token);
    assertTrue(fsl.getHead() == token);
    // change type where the type forces a copy
    // token -> token_type_type
    // These types are completely orthogonal, one doesn't subsume the other
    llc.ll_setIntValue(tokId, 0, tokenTypeType.getCode());
    TOP ttt = cas.getFsFromId(tokId);
    // A new FS object was created, but it keeps the old id, and the FS-array
    // and FS-list references were rewired to the replacement.
    assertTrue(ttt != token);
    assertTrue(ttt._id() == tokId);
    assertEquals(ttt._getTypeImpl(), tokenTypeType);
    assertTrue(fsa.get(0) == ttt);
    assertTrue(fsl.getHead() == ttt);
    llc.ll_setIntValue(tokId, 0, tokenType.getCode());
    token = cas.getFsFromId(tokId);
    assertTrue(ttt != token);
    assertTrue(ttt._id() == token._id());
    // Feature values did not survive the orthogonal type change: everything
    // is back to defaults (0 / null), only the sofa is retained.
    assertEquals(token.getBegin(), 0);
    assertEquals(token.getEnd(), 0);
    assertEquals(sofa, token.getSofa());
    Assertions.assertThat(token.getFloatValue(tokenFloatFeat)).isEqualTo(0.0f);
    assertEquals(null, token.getFeatureValue(tokenTypeFeat));
    assertTrue(fsa.get(0) == token);
    assertTrue(fsl.getHead() == token);
  }
@Test
public void testSetArrayValuedFeature() {
FeatureStructure testFS = this.cas.createFS(this.arrayFsWithSubtypeType);
assertTrue(testFS.getFeatureValue(this.arrayFsWithSubtypeTypeFeat) == null);
ArrayFS arrayFS = this.cas.createArrayFS(1);
testFS.setFeatureValue(this.arrayFsWithSubtypeTypeFeat, arrayFS);
assertTrue(true);
boolean caughtExc = false;
try {
testFS.setFeatureValue(this.arrayFsWithSubtypeTypeFeat, testFS);
} catch (CASRuntimeException e) {
caughtExc = true;
assertTrue(e.getMessageKey().equals(CASRuntimeException.INAPPROP_RANGE));
}
assertTrue(caughtExc);
}
  /**
   * Exercises FS-valued and String-valued feature get/set on a token,
   * including the INAPPROP_FEAT / INAPPROP_RANGE error paths, then
   * round-trips an FS reference through the low-level int API.
   */
  @Test
  public void testSetFeatureValue() {
    // FeatureStructure token = this.cas.createFS(this.tokenType);
    LowLevelCAS llcas = cas.getLowLevelCAS();
    int i = llcas.ll_createFS(this.tokenType.getCode());
    AnnotationFS token = llcas.ll_getFSForRef(i);
    // Unset features read back as null.
    assertTrue(token.getFeatureValue(this.tokenTypeFeat) == null);
    assertTrue(token.getStringValue(this.lemmaFeat) == null);
    boolean caughtExc = false;
    try {
      // sentLenFeat is not defined on the token type.
      token.getFeatureValue(this.sentLenFeat);
    } catch (CASRuntimeException e) {
      caughtExc = true;
      assertTrue(e.getMessageKey().equals(CASRuntimeException.INAPPROP_FEAT));
    }
    assertTrue(caughtExc);
    FeatureStructure word = this.cas.createFS(this.wordType);
    token.setFeatureValue(this.tokenTypeFeat, word);
    caughtExc = false;
    try {
      // lemmaFeat is String-valued; an FS value must be rejected.
      token.setFeatureValue(this.lemmaFeat, word);
    } catch (CASRuntimeException e) {
      caughtExc = true;
      assertTrue(e.getMessageKey().equals(CASRuntimeException.INAPPROP_RANGE));
    }
    assertTrue(caughtExc);
    try {
      // null is a legal value for an FS-valued feature.
      token.setFeatureValue(this.tokenTypeFeat, null);
    } catch (CASRuntimeException e) {
      assertTrue(false);
    }
    caughtExc = false;
    try {
      // startFeature is int-valued, so setting an FS (null) is a range error.
      token.setFeatureValue(this.startFeature, null);
    } catch (CASRuntimeException e) {
      assertTrue(e.getMessageKey().equals(CASRuntimeException.INAPPROP_RANGE));
      caughtExc = true;
    }
    assertTrue(caughtExc);
    // a "getter" test, not "setter" test, on purpose
    caughtExc = false;
    try {
      token.getFeatureValue(this.startFeature);
    } catch (CASRuntimeException e) {
      assertTrue(e.getMessageKey().equals(CASRuntimeException.INAPPROP_RANGE_NOT_FS));
      caughtExc = true;
    }
    assertTrue(caughtExc);
    // String values round-trip, including the empty string.
    assertTrue(token.getStringValue(this.lemmaFeat) == null);
    String testString = "test";
    token.setStringValue(this.lemmaFeat, testString);
    assertTrue(token.getStringValue(this.lemmaFeat).equals(testString));
    testString = "";
    token.setStringValue(this.lemmaFeat, testString);
    assertTrue(token.getStringValue(this.lemmaFeat).equals(testString));
    // test low level
    token.setFeatureValue(tokenTypeFeat, word);
    int fsRef = token._id();
    int fc = ((FeatureImpl) tokenTypeFeat).getCode();
    // The low-level int value of an FS-valued feature is the referenced FS's id.
    assertEquals(llcas.ll_getIntValue(fsRef, fc), word._id());
    int word2_id = llcas.ll_createFS(((TypeImpl) wordType).getCode());
    TOP word2 = llcas.ll_getFSForRef(word2_id);
    // TOP word2 = cas.createFS(wordType);
    // Writing a different FS id through the low-level API redirects the reference.
    llcas.ll_setIntValue(fsRef, fc, word2._id());
    assertEquals(token.getFeatureValue(tokenTypeFeat), word2);
  }
  /**
   * Float feature set/get round trips, error paths for wrong feature kinds,
   * and the low-level int-encoded float access.
   */
  @Test
  public void testSetFloatValue() {
    // AnnotationFS token = (AnnotationFS) this.cas.createFS(this.tokenType);
    LowLevelCAS llcas = cas.getLowLevelCAS();
    int i = llcas.ll_createFS(this.tokenType.getCode());
    Annotation token = llcas.ll_getFSForRef(i);
    // Default value of an unset float feature is 0.0f.
    assertTrue(token.getFloatValue(this.tokenFloatFeat) == 0.0f);
    float f = -3.2f;
    token.setFloatValue(this.tokenFloatFeat, f);
    assertTrue(token.getFloatValue(this.tokenFloatFeat) == f);
    f = 51234.132f;
    token.setFloatValue(this.tokenFloatFeat, f);
    assertTrue(token.getFloatValue(this.tokenFloatFeat) == f);
    boolean caughtExc = false;
    try {
      // tokenTypeFeat is FS-valued: setting a float is a range error.
      token.setFloatValue(this.tokenTypeFeat, 0.0f);
    } catch (CASRuntimeException e) {
      caughtExc = true;
      assertTrue(e.getMessageKey().equals(CASRuntimeException.INAPPROP_RANGE));
    }
    assertTrue(caughtExc);
    // Failed sets must leave the stored value untouched.
    assertTrue(token.getFloatValue(this.tokenFloatFeat) == f);
    caughtExc = false;
    try {
      // sentLenFeat is not defined on the token type.
      token.setFloatValue(this.sentLenFeat, 0.0f);
    } catch (CASRuntimeException e) {
      caughtExc = true;
      assertTrue(e.getMessageKey().equals(CASRuntimeException.INAPPROP_RANGE));
    }
    assertTrue(caughtExc);
    assertTrue(token.getFloatValue(this.tokenFloatFeat) == f);
    // low level
    // Floats are stored as raw int bits; float2int/ll_getIntValue must agree.
    int ffc = ((FeatureImpl) tokenFloatFeat).getCode();
    llcas.ll_setIntValue(token._id(), ffc, CASImpl.float2int(123.456f));
    Assertions.assertThat(token.getFloatValue(tokenFloatFeat)).isEqualTo(123.456f);
    assertEquals(llcas.ll_getIntValue(token._id(), ffc), CASImpl.float2int(123.456f));
  }
  /**
   * Long feature set/get round trips (including values beyond int range) and
   * the low-level int-handle representation of long values.
   */
  @Test
  public void testSetLongValue() {
    // AnnotationFS token = (AnnotationFS) this.cas.createFS(this.tokenType);
    LowLevelCAS llcas = cas.getLowLevelCAS();
    int i = llcas.ll_createFS(this.tokenType.getCode());
    AnnotationFS token = llcas.ll_getFSForRef(i);
    assertTrue(token.getLongValue(this.tokenLongFeat) == 0.0f);
    long f = -34L;
    token.setLongValue(this.tokenLongFeat, f);
    assertTrue(token.getLongValue(this.tokenLongFeat) == f);
    // Values outside int range must round-trip too.
    f = 8_000_000_003L;
    token.setLongValue(this.tokenLongFeat, f);
    assertTrue(token.getLongValue(this.tokenLongFeat) == f);
    f = -8_000_000_003L;
    token.setLongValue(this.tokenLongFeat, f);
    assertTrue(token.getLongValue(this.tokenLongFeat) == f);
    // low level
    // Longs are accessed through an int handle at the low level.
    // NOTE(review): the expected handle value 1 appears to depend on internal
    // heap allocation order — confirm against the LowLevelCAS implementation.
    int ffc = ((FeatureImpl) tokenLongFeat).getCode();
    int h = llcas.ll_getIntValue(token._id(), ffc);
    assertEquals(1, h);
    long g = 23;
    token.setLongValue(this.tokenLongFeat, g);
    assertEquals(g, token.getLongValue(this.tokenLongFeat));
    // Restoring the saved handle restores the previously stored long value.
    llcas.ll_setIntValue(token._id(), ffc, h);
    assertEquals(f, token.getLongValue(this.tokenLongFeat));
  }
  /**
   * Double feature set/get round trips and the low-level int-handle
   * representation of double values (mirrors {@code testSetLongValue}).
   */
  @Test
  public void testSetDoubleValue() {
    // AnnotationFS token = (AnnotationFS) this.cas.createFS(this.tokenType);
    LowLevelCAS llcas = cas.getLowLevelCAS();
    int i = llcas.ll_createFS(this.tokenType.getCode());
    AnnotationFS token = llcas.ll_getFSForRef(i);
    assertTrue(token.getDoubleValue(this.tokenDoubleFeat) == 0.0f);
    double f = -34.56D;
    token.setDoubleValue(this.tokenDoubleFeat, f);
    assertTrue(token.getDoubleValue(this.tokenDoubleFeat) == f);
    f = 8_000_000_003.24852D;
    token.setDoubleValue(this.tokenDoubleFeat, f);
    assertTrue(token.getDoubleValue(this.tokenDoubleFeat) == f);
    f = -8_000_000_003D;
    token.setDoubleValue(this.tokenDoubleFeat, f);
    assertTrue(token.getDoubleValue(this.tokenDoubleFeat) == f);
    // low level
    // Doubles are accessed through an int handle at the low level.
    // NOTE(review): the expected handle value 1 appears to depend on internal
    // heap allocation order — confirm against the LowLevelCAS implementation.
    int ffc = ((FeatureImpl) tokenDoubleFeat).getCode();
    int h = llcas.ll_getIntValue(token._id(), ffc);
    assertEquals(1, h);
    double g = 23;
    token.setDoubleValue(this.tokenDoubleFeat, g);
    Assertions.assertThat(token.getDoubleValue(this.tokenDoubleFeat)).isEqualTo(g);
    // Restoring the saved handle restores the previously stored double value.
    llcas.ll_setIntValue(token._id(), ffc, h);
    Assertions.assertThat(token.getDoubleValue(this.tokenDoubleFeat)).isEqualTo(f);
  }
@Test
public void testSetIntValue() {
// AnnotationFS token = (AnnotationFS) this.cas.createFS(this.tokenType);
// AnnotationFS token = (AnnotationFS) this.cas.createFS(this.tokenType);
LowLevelCAS llcas = cas.getLowLevelCAS();
int j = llcas.ll_createFS(this.tokenType.getCode());
AnnotationFS token = llcas.ll_getFSForRef(j);
assertTrue(token.getIntValue(this.startFeature) == 0);
int i = 3;
token.setIntValue(this.startFeature, i);
assertTrue(token.getIntValue(this.startFeature) == i);
i = -123456;
token.setIntValue(this.startFeature, i);
assertTrue(token.getIntValue(this.startFeature) == i);
boolean caughtExc = false;
try {
token.setIntValue(this.tokenTypeFeat, 0);
} catch (CASRuntimeException e) {
caughtExc = true;
assertTrue(e.getMessageKey().equals(CASRuntimeException.INAPPROP_RANGE));
}
assertTrue(caughtExc);
assertTrue(token.getIntValue(this.startFeature) == i);
caughtExc = false;
try {
token.setIntValue(this.sentLenFeat, 0);
} catch (CASRuntimeException e) {
caughtExc = true;
assertTrue(e.getMessageKey().equals(CASRuntimeException.INAPPROP_FEAT));
}
assertTrue(caughtExc);
assertTrue(token.getIntValue(this.startFeature) == i);
}
@Test
public void testStrings() {
  FeatureStructure langPair = this.cas.createFS(this.langPairType);
  assertTrue(langPair != null);
  // All string features of a fresh FS must read back as null.
  try {
    assertTrue(langPair.getStringValue(this.lang1) == null);
  } catch (Exception e) {
    assertTrue(false);
  }
  try {
    assertTrue(langPair.getStringValue(this.lang2) == null);
  } catch (Exception e) {
    assertTrue(false);
  }
  try {
    assertTrue(langPair.getStringValue(this.descr) == null);
  } catch (Exception e) {
    assertTrue(false);
  }
  // An unconstrained string feature accepts any value, and accepts null.
  String sample = "Some string.";
  try {
    langPair.setStringValue(this.descr, sample);
    assertEquals(sample, langPair.getStringValue(this.descr));
  } catch (CASRuntimeException e) {
    assertTrue(false);
  }
  try {
    langPair.setStringValue(this.descr, null);
    assertTrue(langPair.getStringValue(this.descr) == null);
  } catch (CASRuntimeException e) {
    assertTrue(false);
  }
  // String-subtype features accept only values from their declared group.
  try {
    langPair.setStringValue(this.lang1, CASTestSetup.GROUP_1_LANGUAGES[0]);
    langPair.setStringValue(this.lang2, CASTestSetup.GROUP_2_LANGUAGES[2]);
  } catch (Exception e) {
    assertTrue(false);
  }
  // Cross-group assignments must fail with ILLEGAL_STRING_VALUE.
  boolean caught = false;
  try {
    langPair.setStringValue(this.lang1, CASTestSetup.GROUP_2_LANGUAGES[0]);
  } catch (CASRuntimeException e) {
    assertEquals(CASRuntimeException.ILLEGAL_STRING_VALUE, e.getMessageKey());
    caught = true;
  }
  assertTrue(caught);
  caught = false;
  try {
    langPair.setStringValue(this.lang2, sample);
  } catch (CASRuntimeException e) {
    assertEquals(CASRuntimeException.ILLEGAL_STRING_VALUE, e.getMessageKey());
    caught = true;
  }
  assertTrue(caught);
  // Regression: toString() used to fail because string subtypes were
  // incorrectly classified as ref types.
  langPair.toString();
  // Low-level view: string values are interned and addressed by small int
  // codes in creation order (0 == null, 1 == first distinct string, ...).
  LowLevelCAS llCas = this.cas.getLowLevelCAS();
  LowLevelTypeSystem llTypeSystem = llCas.ll_getTypeSystem();
  final int tokenTypeCode = llTypeSystem.ll_getCodeForType(this.tokenType);
  final int tokenAddr = llCas.ll_createFS(tokenTypeCode);
  final int lemmaCode = llTypeSystem.ll_getCodeForFeature(this.lemmaFeat);
  llCas.ll_setStringValue(tokenAddr, lemmaCode, "test", true);
  assertTrue(llCas.ll_getCharBufferValueSize(tokenAddr, lemmaCode) == 4);
  // try accessing low level strings using ll_setIntValue
  final int stringCode = llCas.ll_getIntValue(tokenAddr, lemmaCode);
  assertTrue(stringCode == 1);
  llCas.ll_setStringValue(tokenAddr, lemmaCode, "test", true);
  assertEquals(1, llCas.ll_getIntValue(tokenAddr, lemmaCode)); // should not change
  llCas.ll_setStringValue(tokenAddr, lemmaCode, "test2", true);
  assertEquals(2, llCas.ll_getIntValue(tokenAddr, lemmaCode));
  llCas.ll_setIntValue(tokenAddr, lemmaCode, 1);
  assertEquals(1, llCas.ll_getIntValue(tokenAddr, lemmaCode));
  assertEquals("test", llCas.ll_getStringValue(tokenAddr, lemmaCode));
  llCas.ll_setIntValue(tokenAddr, lemmaCode, 0);
  assertEquals(0, llCas.ll_getIntValue(tokenAddr, lemmaCode));
  assertTrue(llCas.ll_getStringValue(tokenAddr, lemmaCode) == null);
  llCas.ll_setIntValue(tokenAddr, lemmaCode, 2);
  assertEquals("test2", llCas.ll_getStringValue(tokenAddr, lemmaCode));
  // check that equal strings are shared
  llCas.ll_setStringValue(tokenAddr, lemmaCode, new String("test"));
  assertEquals(1, llCas.ll_getIntValue(tokenAddr, lemmaCode));
}
@Test
public void testEquals() {
  // Placeholder: FeatureStructure equality semantics are not exercised yet.
  // ???
}
@Test
public void testToString() {
  // Build a list node whose tail points at itself (head still null) and make
  // sure toString() handles the self-reference. Output is only printed, not
  // asserted; the test passes as long as toString() does not throw.
  FeatureStructure selfNode = this.cas.createFS(this.neListType);
  selfNode.setFeatureValue(this.tlFeature, selfNode);
  System.out.println("toString for fslist, tail -> node, head is null");
  System.out.println(selfNode.toString());
  // A second node sharing the same head and chaining to the first node.
  FeatureStructure sharedHead = this.cas.createFS(this.tokenType);
  FeatureStructure outerNode = this.cas.createFS(this.neListType);
  outerNode.setFeatureValue(this.tlFeature, selfNode);
  outerNode.setFeatureValue(this.hdFeature, sharedHead);
  selfNode.setFeatureValue(this.hdFeature, sharedHead);
  System.out.println(
      "toString for fslist, tail is prev, prev's head: new token, head is same as rpev's head");
  System.out.println(outerNode.toString());
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.ivy.core;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.ivy.core.cache.ArtifactOrigin;
import org.apache.ivy.core.cache.RepositoryCacheManager;
import org.apache.ivy.core.module.descriptor.Artifact;
import org.apache.ivy.core.module.descriptor.DefaultArtifact;
import org.apache.ivy.core.module.id.ModuleRevisionId;
import org.apache.ivy.core.settings.IvyVariableContainer;
import org.apache.ivy.core.settings.IvyVariableContainerImpl;
import org.apache.ivy.util.Message;
/**
 * Utility for substituting the three kinds of placeholders Ivy patterns may
 * contain: {@code [token]} references (artifact/module attributes),
 * {@code ${variable}} references and {@code @{parameter}} references.
 * Patterns may also contain optional parts written {@code (...)} which are
 * kept only when a token inside them resolved to a non-empty value.
 */
public final class IvyPatternHelper {
    private IvyPatternHelper() {
        //Helper class
    }
    // Token names recognised inside [ ] in patterns.
    public static final String CONF_KEY = "conf";
    public static final String TYPE_KEY = "type";
    public static final String EXT_KEY = "ext";
    public static final String ARTIFACT_KEY = "artifact";
    public static final String BRANCH_KEY = "branch";
    public static final String REVISION_KEY = "revision";
    public static final String MODULE_KEY = "module";
    public static final String ORGANISATION_KEY = "organisation";
    public static final String ORGANISATION_KEY2 = "organization";
    public static final String ORGANISATION_PATH_KEY = "orgPath";
    public static final String ORIGINAL_ARTIFACTNAME_KEY = "originalname";
    // @{name} and ${name} references; non-greedy so nested braces don't swallow text.
    private static final Pattern PARAM_PATTERN = Pattern.compile("\\@\\{(.*?)\\}");
    private static final Pattern VAR_PATTERN = Pattern.compile("\\$\\{(.*?)\\}");
    /**
     * Substitutes tokens for an ivy file (artifact name/type fixed to "ivy", ext "xml").
     */
    public static String substitute(String pattern, ModuleRevisionId moduleRevision) {
        return substitute(pattern, moduleRevision.getOrganisation(), moduleRevision.getName(),
                moduleRevision.getBranch(), moduleRevision.getRevision(), "ivy", "ivy", "xml", null,
                null, moduleRevision.getQualifiedExtraAttributes(), null);
    }
    /** Substitutes tokens for an ad-hoc artifact of the given name/type/ext. */
    public static String substitute(String pattern, ModuleRevisionId moduleRevision,
            String artifact, String type, String ext) {
        return substitute(pattern, moduleRevision, new DefaultArtifact(moduleRevision, null,
                artifact, type, ext));
    }
    /** Substitutes tokens taken from the given artifact (no conf, no origin). */
    public static String substitute(String pattern, Artifact artifact) {
        return substitute(pattern, artifact, (String) null);
    }
    /** Substitutes tokens taken from the given artifact and its resolved origin. */
    public static String substitute(String pattern, Artifact artifact, ArtifactOrigin origin) {
        return substitute(pattern, artifact.getModuleRevisionId(), artifact, (String) null, origin);
    }
    /** Substitutes tokens taken from the given artifact in the given configuration. */
    public static String substitute(String pattern, Artifact artifact, String conf) {
        return substitute(pattern, artifact.getModuleRevisionId(), artifact, conf, (ArtifactOrigin) null);
    }
    /** Substitutes tokens for an artifact resolved under a (possibly different) mrid. */
    public static String substitute(String pattern, ModuleRevisionId mrid, Artifact artifact) {
        return substitute(pattern, mrid, artifact, (String) null, (ArtifactOrigin) null);
    }
    /** Explodes mrid + artifact into individual token values and delegates. */
    public static String substitute(String pattern, ModuleRevisionId mrid, Artifact artifact,
            String conf, ArtifactOrigin origin) {
        return substitute(pattern, mrid.getOrganisation(), mrid.getName(), mrid.getBranch(), mrid
                .getRevision(), artifact.getName(), artifact.getType(), artifact.getExt(), conf,
                origin, mrid.getQualifiedExtraAttributes(), artifact.getQualifiedExtraAttributes());
    }
    /** Convenience overload with no branch, conf, origin or extra attributes. */
    public static String substitute(String pattern, String org, String module, String revision,
            String artifact, String type, String ext) {
        return substitute(pattern, org, module, (String) null, revision, artifact, type, ext, (String) null,
                (ArtifactOrigin) null, (Map) null, (Map) null);
    }
    // CheckStyle:ParameterNumber OFF
    /** Convenience overload with no branch, origin or extra attributes. */
    public static String substitute(String pattern, String org, String module, String revision,
            String artifact, String type, String ext, String conf) {
        return substitute(pattern, org, module, (String) null, revision, artifact, type, ext, conf,
                (ArtifactOrigin) null, (Map) null, (Map) null);
    }
    /** Convenience overload with no branch or origin. */
    public static String substitute(String pattern, String org, String module, String revision,
            String artifact, String type, String ext, String conf, Map extraModuleAttributes,
            Map extraArtifactAttributes) {
        return substitute(pattern, org, module, (String) null, revision, artifact, type, ext, conf,
                (ArtifactOrigin) null, extraModuleAttributes, extraArtifactAttributes);
    }
    /**
     * Full substitution entry point: builds the token-to-value map and applies it.
     * Null values get fixed defaults (type/ext "jar", conf "default", others "");
     * a null artifact name falls back to the module name.
     *
     * @param origin if non-null, used to resolve the "originalname" token
     *        directly; otherwise it is looked up lazily from the cache.
     */
    public static String substitute(String pattern, String org, String module, String branch,
            String revision, String artifact, String type, String ext, String conf,
            ArtifactOrigin origin, Map extraModuleAttributes, Map extraArtifactAttributes) {
        Map tokens = new HashMap();
        if (extraModuleAttributes != null) {
            for (Iterator entries = extraModuleAttributes.entrySet().iterator(); entries.hasNext(); ) {
                Map.Entry entry = (Map.Entry) entries.next();
                String token = (String) entry.getKey();
                // Strip any namespace prefix ("ns:attr" -> "attr") so patterns
                // can reference the bare attribute name.
                if (token.indexOf(':') > 0) {
                    token = token.substring(token.indexOf(':') + 1);
                }
                tokens.put(token, entry.getValue());
            }
        }
        // Artifact attributes are added second and thus win over module
        // attributes with the same (unprefixed) name.
        if (extraArtifactAttributes != null) {
            for (Iterator entries = extraArtifactAttributes.entrySet().iterator(); entries.hasNext(); ) {
                Map.Entry entry = (Map.Entry) entries.next();
                String token = (String) entry.getKey();
                if (token.indexOf(':') > 0) {
                    token = token.substring(token.indexOf(':') + 1);
                }
                tokens.put(token, entry.getValue());
            }
        }
        tokens.put(ORGANISATION_KEY, org == null ? "" : org);
        tokens.put(ORGANISATION_KEY2, org == null ? "" : org);
        // orgPath maps "com.acme" to "com/acme" for Maven-style layouts.
        tokens.put(ORGANISATION_PATH_KEY, org == null ? "" : org.replace('.', '/'));
        tokens.put(MODULE_KEY, module == null ? "" : module);
        tokens.put(BRANCH_KEY, branch == null ? "" : branch);
        tokens.put(REVISION_KEY, revision == null ? "" : revision);
        tokens.put(ARTIFACT_KEY, artifact == null ? module : artifact);
        tokens.put(TYPE_KEY, type == null ? "jar" : type);
        tokens.put(EXT_KEY, ext == null ? "jar" : ext);
        tokens.put(CONF_KEY, conf == null ? "default" : conf);
        // The original-name value is a lazy object: its toString() (and hence
        // any cache lookup) only runs if the pattern actually uses the token.
        if (origin == null) {
            tokens.put(ORIGINAL_ARTIFACTNAME_KEY, new OriginalArtifactNameValue(org,
                    module, branch, revision, artifact, type, ext, extraModuleAttributes, extraArtifactAttributes));
        } else {
            tokens.put(ORIGINAL_ARTIFACTNAME_KEY, new OriginalArtifactNameValue(origin));
        }
        return substituteTokens(pattern, tokens);
    }
    // CheckStyle:ParameterNumber ON
    /** Recursively substitutes ${...} variables from a plain map. */
    public static String substituteVariables(String pattern, Map variables) {
        return substituteVariables(pattern, new IvyVariableContainerImpl(variables), new Stack());
    }
    /** Recursively substitutes ${...} variables from a variable container. */
    public static String substituteVariables(String pattern, IvyVariableContainer variables) {
        return substituteVariables(pattern, variables, new Stack());
    }
    /**
     * Recursive worker for variable substitution.
     *
     * @param substituting stack of variable names currently being expanded,
     *        used to detect and report cyclic definitions.
     */
    private static String substituteVariables(
            String pattern, IvyVariableContainer variables, Stack substituting) {
        // if you supply null, null is what you get
        if (pattern == null) {
            return null;
        }
        Matcher m = VAR_PATTERN.matcher(pattern);
        boolean useVariables = false;
        StringBuffer sb = null;
        while (m.find()) {
            // Allocate the buffer lazily: patterns without variables are
            // returned unchanged without any copying.
            if (!useVariables) {
                useVariables = true;
                sb = new StringBuffer();
            }
            String var = m.group(1);
            String val = (String) variables.getVariable(var);
            if (val != null) {
                int index = substituting.indexOf(var);
                if (index != -1) {
                    List cycle = new ArrayList(substituting.subList(index, substituting.size()));
                    cycle.add(var);
                    throw new IllegalArgumentException("cyclic variable definition: cycle = "
                            + cycle);
                }
                substituting.push(var);
                val = substituteVariables(val, variables, substituting);
                substituting.pop();
            } else {
                // Unknown variable: keep the literal ${name} text.
                val = m.group();
            }
            // Escape backslashes and dollars so appendReplacement treats the
            // value literally (it would otherwise interpret $n as group refs).
            m.appendReplacement(sb, val.replaceAll("\\\\", "\\\\\\\\")
                    .replaceAll("\\$", "\\\\\\$"));
        }
        if (useVariables) {
            m.appendTail(sb);
            return sb.toString();
        } else {
            return pattern;
        }
    }
    /**
     * Replaces [token] references using the given map, honouring optional
     * (...) parts: an optional part is emitted only if it contained a token
     * that resolved to a non-empty value, and is kept verbatim (with parens)
     * if it contained no token at all. Unknown tokens outside optional parts
     * are left as literal [token] text.
     *
     * @throws IllegalArgumentException on unbalanced ( ) or [ ] in the pattern
     */
    public static String substituteTokens(String pattern, Map tokens) {
        Map tokensCopy = new HashMap(tokens);
        // Derive the organisation aliases if the caller supplied only the
        // canonical "organisation" key.
        if (tokensCopy.containsKey(ORGANISATION_KEY) && !tokensCopy.containsKey(ORGANISATION_KEY2)) {
            tokensCopy.put(ORGANISATION_KEY2, tokensCopy.get(ORGANISATION_KEY));
        }
        if (tokensCopy.containsKey(ORGANISATION_KEY) && !tokensCopy.containsKey(ORGANISATION_PATH_KEY)) {
            String org = (String) tokensCopy.get(ORGANISATION_KEY);
            tokensCopy.put(ORGANISATION_PATH_KEY, org == null ? "" : org.replace('.', '/'));
        }
        // Single-pass state machine over the pattern characters.
        StringBuffer buffer = new StringBuffer();
        char[] chars = pattern.toCharArray();
        StringBuffer optionalPart = null;
        StringBuffer tokenBuffer = null;
        boolean insideOptionalPart = false;
        boolean insideToken = false;
        boolean tokenSeen = false;      // did the current optional part contain any token?
        boolean tokenHadValue = false;  // did that token resolve to a non-empty value?
        for (int i = 0; i < chars.length; i++) {
            switch (chars[i]) {
                case '(':
                    // Optional parts cannot nest.
                    if (insideOptionalPart) {
                        throw new IllegalArgumentException(
                                "invalid start of optional part at position " + i + " in pattern "
                                        + pattern);
                    }
                    optionalPart = new StringBuffer();
                    insideOptionalPart = true;
                    tokenSeen = false;
                    tokenHadValue = false;
                    break;
                case ')':
                    if (!insideOptionalPart || insideToken) {
                        throw new IllegalArgumentException(
                                "invalid end of optional part at position " + i + " in pattern "
                                        + pattern);
                    }
                    if (tokenHadValue) {
                        buffer.append(optionalPart.toString());
                    } else if (!tokenSeen) {
                        // No token inside: the parens were literal text.
                        buffer.append('(').append(optionalPart.toString()).append(')');
                    }
                    // tokenSeen && !tokenHadValue: drop the whole optional part.
                    insideOptionalPart = false;
                    break;
                case '[':
                    if (insideToken) {
                        throw new IllegalArgumentException("invalid start of token at position "
                                + i + " in pattern " + pattern);
                    }
                    tokenBuffer = new StringBuffer();
                    insideToken = true;
                    break;
                case ']':
                    if (!insideToken) {
                        throw new IllegalArgumentException("invalid end of token at position " + i
                                + " in pattern " + pattern);
                    }
                    String token = tokenBuffer.toString();
                    Object tokenValue = tokensCopy.get(token);
                    // toString() here is what triggers lazy values such as
                    // OriginalArtifactNameValue.
                    String value = (tokenValue == null) ? null : tokenValue.toString();
                    if (insideOptionalPart) {
                        tokenHadValue = (value != null) && (value.length() > 0);
                        // NOTE: a null value appends the literal text "null" here,
                        // but the optional part is then dropped (tokenHadValue is
                        // false), so it never reaches the output.
                        optionalPart.append(value);
                    } else {
                        if (value == null) { // the token wasn't set, it's kept as is
                            value = "[" + token + "]";
                        }
                        buffer.append(value);
                    }
                    insideToken = false;
                    tokenSeen = true;
                    break;
                default:
                    if (insideToken) {
                        tokenBuffer.append(chars[i]);
                    } else if (insideOptionalPart) {
                        optionalPart.append(chars[i]);
                    } else {
                        buffer.append(chars[i]);
                    }
                    break;
            }
        }
        if (insideToken) {
            throw new IllegalArgumentException("last token hasn't been closed in pattern "
                    + pattern);
        }
        if (insideOptionalPart) {
            throw new IllegalArgumentException("optional part hasn't been closed in pattern "
                    + pattern);
        }
        return buffer.toString();
    }
    /** Returns a copy of {@code pattern} with every ${variable} occurrence replaced. */
    public static String substituteVariable(String pattern, String variable, String value) {
        StringBuffer buf = new StringBuffer(pattern);
        substituteVariable(buf, variable, value);
        return buf.toString();
    }
    /**
     * In-place replacement of every "${variable}" occurrence in {@code buf}.
     * NOTE(review): the search restarts at {@code index}, i.e. at the start of
     * the text just inserted — if {@code value} itself contains the
     * "${variable}" reference this loops forever. Confirm callers never pass
     * such values.
     */
    public static void substituteVariable(StringBuffer buf, String variable, String value) {
        String from = "${" + variable + "}";
        int fromLength = from.length();
        for (int index = buf.indexOf(from); index != -1; index = buf.indexOf(from, index)) {
            buf.replace(index, index + fromLength, value);
        }
    }
    /** Returns a copy of {@code pattern} with every [token] occurrence replaced. */
    public static String substituteToken(String pattern, String token, String value) {
        StringBuffer buf = new StringBuffer(pattern);
        substituteToken(buf, token, value);
        return buf.toString();
    }
    /**
     * In-place replacement of every "[token]" occurrence in {@code buf}.
     * NOTE(review): same non-advancing search as substituteVariable above — a
     * value containing "[token]" would loop forever.
     */
    public static void substituteToken(StringBuffer buf, String token, String value) {
        String from = getTokenString(token);
        int fromLength = from.length();
        for (int index = buf.indexOf(from); index != -1; index = buf.indexOf(from, index)) {
            buf.replace(index, index + fromLength, value);
        }
    }
    /** Wraps a token name in the [ ] delimiters used by patterns. */
    public static String getTokenString(String token) {
        return "[" + token + "]";
    }
    /** Recursively substitutes @{...} parameters from a plain map. */
    public static String substituteParams(String pattern, Map params) {
        return substituteParams(pattern, new IvyVariableContainerImpl(params), new Stack());
    }
    /**
     * Recursive worker for @{...} parameter substitution; mirrors
     * substituteVariables except for the marker character and that nested
     * expansion goes through substituteVariables (so parameter values may
     * themselves contain ${...} variables).
     */
    private static String substituteParams(
            String pattern, IvyVariableContainer params, Stack substituting) {
        // TODO : refactor this with substituteVariables
        // if you supply null, null is what you get
        if (pattern == null) {
            return null;
        }
        Matcher m = PARAM_PATTERN.matcher(pattern);
        StringBuffer sb = new StringBuffer();
        while (m.find()) {
            String var = m.group(1);
            String val = (String) params.getVariable(var);
            if (val != null) {
                int index = substituting.indexOf(var);
                if (index != -1) {
                    List cycle = new ArrayList(substituting.subList(index, substituting.size()));
                    cycle.add(var);
                    throw new IllegalArgumentException("cyclic param definition: cycle = " + cycle);
                }
                substituting.push(var);
                val = substituteVariables(val, params, substituting);
                substituting.pop();
            } else {
                // Unknown parameter: keep the literal @{name} text.
                val = m.group();
            }
            // Escape backslashes and '@' so the replacement is taken literally.
            m
                    .appendReplacement(sb, val.replaceAll("\\\\", "\\\\\\\\").replaceAll("\\@",
                        "\\\\\\@"));
        }
        m.appendTail(sb);
        return sb.toString();
    }
    /**
     * This class returns the original name of the artifact 'on demand'. This is done to avoid
     * having to read the cached datafile containing the original location of the artifact if we
     * don't need it.
     */
    private static class OriginalArtifactNameValue {
        // module properties
        private String org;
        private String moduleName;
        private String branch;
        private String revision;
        private Map extraModuleAttributes;
        // artifact properties
        private String artifactName;
        private String artifactType;
        private String artifactExt;
        private Map extraArtifactAttributes;
        // cached origin;
        private ArtifactOrigin origin;
        public OriginalArtifactNameValue(String org, String moduleName, String branch,
                String revision, String artifactName, String artifactType, String artifactExt,
                Map extraModuleAttributes, Map extraArtifactAttributes) {
            this.org = org;
            this.moduleName = moduleName;
            this.branch = branch;
            this.revision = revision;
            this.artifactName = artifactName;
            this.artifactType = artifactType;
            this.artifactExt = artifactExt;
            this.extraModuleAttributes = extraModuleAttributes;
            this.extraArtifactAttributes = extraArtifactAttributes;
        }
        /**
         * @param origin already-resolved origin; skips the lazy cache lookup
         */
        public OriginalArtifactNameValue(ArtifactOrigin origin) {
            this.origin = origin;
        }
        // Called by substituteTokens only if the original artifact name is needed
        public String toString() {
            if (origin == null) {
                ModuleRevisionId revId = ModuleRevisionId.newInstance(org, moduleName, branch,
                        revision, extraModuleAttributes);
                Artifact artifact = new DefaultArtifact(revId, null, artifactName, artifactType,
                        artifactExt, extraArtifactAttributes);
                // TODO cache: see how we could know which actual cache manager to use, since this
                // will fail when using a resolver in a chain with a specific cache manager
                RepositoryCacheManager cacheManager = IvyContext.getContext().getSettings()
                        .getResolver(revId).getRepositoryCacheManager();
                origin = cacheManager.getSavedArtifactOrigin(artifact);
                if (ArtifactOrigin.isUnknown(origin)) {
                    Message.debug("no artifact origin found for " + artifact + " in "
                            + cacheManager);
                    return null;
                }
            }
            if (ArtifactOrigin.isUnknown(origin)) {
                return null;
            }
            // we assume that the original filename is the last part of the original file location
            // NOTE(review): a location with no '.' makes lastColonIndex -1 and
            // substring() throw StringIndexOutOfBoundsException — presumably
            // locations always carry an extension; confirm.
            String location = origin.getLocation();
            int lastPathIndex = location.lastIndexOf('/');
            if (lastPathIndex == -1) {
                lastPathIndex = location.lastIndexOf('\\');
            }
            int lastColonIndex = location.lastIndexOf('.');
            return location.substring(lastPathIndex + 1, lastColonIndex);
        }
    }
    /** Returns the literal prefix of the pattern before the first token or optional part. */
    public static String getTokenRoot(String pattern) {
        int index = pattern.indexOf('[');
        if (index == -1) {
            return pattern;
        } else {
            // it could be that pattern is something like "lib/([optional]/)[module]"
            // we don't want the '(' in the result
            int optionalIndex = pattern.indexOf('(');
            if (optionalIndex >= 0) {
                index = Math.min(index, optionalIndex);
            }
            return pattern.substring(0, index);
        }
    }
    /** Returns the name of the first [token] in the pattern, or null if there is none. */
    public static String getFirstToken(String pattern) {
        if (pattern == null) {
            return null;
        }
        int startIndex = pattern.indexOf('[');
        if (startIndex == -1) {
            return null;
        }
        int endIndex = pattern.indexOf(']', startIndex);
        if (endIndex == -1) {
            return null;
        }
        return pattern.substring(startIndex + 1, endIndex);
    }
}
| |
/**
* JLibs: Common Utilities for Java
* Copyright (C) 2009 Santhosh Kumar T <santhosh.tekuri@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*/
package org.django.acquabooks.io;
import org.django.acquabooks.utils.OS;
import java.io.PrintStream;
/**
* Ansi coloring support is provided by this class. <p> To print "hello ansi world" in bold with blue foreground and white background: <pre> Ansi ansi = new
* Ansi(Ansi.Attribute.BRIGHT, Ansi.Color.BLUE, Ansi.Color.WHITE); ansi.{@link #out(String) out}("hello ansi world") </pre>
*
* same can be done as below: <pre> String msg = ansi.{@link #colorize(String) colorize}("hello ansi world"); // msg is original string wrapped with ansi
* control sequences System.out.println(msg); </pre>
*
* <b>Ansi Support:</b> <p> Ansi might not be supported on all systems. Ansi is mostly supported by all unix operating systems. <br><br> {@link Ansi#SUPPORTED}
* is a final boolean, that can be used to check whether your console supports Ansi format; <br><br> Ansi class uses simple checks to decide whether ansi is
* supported or not. Sometimes it may do wrong guess. In such cases you can override its decision using following system property: <code>-DAnsi=true</code> or
* <code>-DAnsi=false</code> <br><br> if {@link Ansi#SUPPORTED} is false, any ansi method will not produce ansi control sequences. so you can safely use:
* <code>ansi.out("hello ansi world")</code> irrespective of ansi is supported or not. if ansi is not supported, this will simply do
* <code>System.out.print("hello ansi world")</code>
*
* @author Santhosh Kumar T
*/
public class Ansi {
    /**
     * specifies whether ansi is supported or not. <p><br> when this is false, it doesn't colorize given strings, rather than simply returns the given strings
     * <p><br> It tries best effort to guess whether ansi is supported or not. But you can override this value using system property "Ansi" (-DAnsi=true/false)
     */
    public static final boolean SUPPORTED = Boolean.getBoolean("Ansi") || OS.get().isUnix() && System.console() != null;
    /**
     * this enum represents the attribute of text
     */
    public enum Attribute {
        /**
         * Reset All Attributes (return to normal mode)
         */
        NORMAL(0),
        /**
         * Usually turns on BOLD
         */
        BRIGHT(1),
        DIM(2),
        UNDERLINE(4),
        BLINK(5),
        /**
         * Reverse video on
         */
        REVERSE(7),
        /**
         * Concealed on
         */
        HIDDEN(8);
        // SGR code, stored as its decimal string form since it is only ever
        // concatenated into escape sequences.
        private final String value;
        private Attribute(final int value) {
            this.value = String.valueOf(value);
        }
        public String toString() {
            // value is already a String; no concatenation needed.
            return value;
        }
    }
    /**
     * this enum represents the color of text
     */
    public enum Color {
        BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE
    }
    /**
     * A color that is either one of the 8 standard ANSI colors or an
     * xterm-256 palette index.
     */
    public final static class AnsiColor {
        public final static AnsiColor BLACK = new AnsiColor(Color.BLACK);
        public final static AnsiColor RED = new AnsiColor(Color.RED);
        public final static AnsiColor GREEN = new AnsiColor(Color.GREEN);
        public final static AnsiColor YELLOW = new AnsiColor(Color.YELLOW);
        public final static AnsiColor BLUE = new AnsiColor(Color.BLUE);
        public final static AnsiColor MAGENTA = new AnsiColor(Color.MAGENTA);
        public final static AnsiColor CYAN = new AnsiColor(Color.CYAN);
        public final static AnsiColor WHITE = new AnsiColor(Color.WHITE);
        // exactly one of these two is set: a 256-palette index (>= 0) or a standard color
        private final int _colorIndex;
        private final Color _standardColor;
        /** Creates an xterm-256 palette color with the given index. */
        public AnsiColor(final int colorIndex) {
            _colorIndex = colorIndex;
            _standardColor = null;
        }
        /** Wraps one of the 8 standard ANSI colors. */
        public AnsiColor(final Color standardColor) {
            _colorIndex = -1;
            _standardColor = standardColor;
        }
        /** @return the xterm-256 index, or -1 for a standard color */
        public final int getColorIndex() {
            return _colorIndex;
        }
        /** @return true if this wraps one of the 8 standard ANSI colors */
        public final boolean isStandardColor() {
            return _standardColor != null;
        }
        /** @return the wrapped standard color, or null for a 256-palette color */
        public final Color getStandardColor() {
            return _standardColor;
        }
        /** Maps a standard {@link Color} to its shared AnsiColor constant; null-safe. */
        public static AnsiColor forStandardColor(final Color color) {
            if (color == null) {
                return null;
            }
            switch (color) {
                case BLACK:
                    return BLACK;
                case RED:
                    return RED;
                case GREEN:
                    return GREEN;
                case YELLOW:
                    return YELLOW;
                case BLUE:
                    return BLUE;
                case MAGENTA:
                    return MAGENTA;
                case CYAN:
                    return CYAN;
                case WHITE:
                    return WHITE;
                default:
                    return new AnsiColor(color);
            }
        }
    }
    // CSI introducer and SGR terminator of an ANSI escape sequence.
    private static final String PREFIX = "\u001b["; //NOI18N
    private static final String SUFFIX = "m";
    private static final String XTERM_256_SEPARATOR = "5;";
    private static final String SEPARATOR = ";";
    // "\u001b[m" resets all attributes back to normal.
    private static final String END = PREFIX + SUFFIX;
    // complete escape sequence enabling this instance's attribute/colors
    private String start = "";
    /**
     * Creates new instanceof Ansi.
     *
     * @param attr attribute of text, null means don't change
     * @param foreground foreground color of text, null means don't change
     * @param background background color of text, null means don't change
     */
    public Ansi(final Attribute attr, final Color foreground, final Color background) {
        init(attr, AnsiColor.forStandardColor(foreground), AnsiColor.forStandardColor(background));
    }
    /**
     * Creates new instanceof Ansi.
     *
     * @param attr attribute of text, null means don't change
     * @param foreground foreground color of text, null means don't change
     * @param background background color of text, null means don't change
     */
    public Ansi(final Attribute attr, final AnsiColor foreground, final AnsiColor background) {
        init(attr, foreground, background);
    }
    /**
     * Creates new instanceof of ansi with specified format.<p> The format syntax is
     * <pre>
     * Attribute[;Foreground[;Background]]
     * </pre>
     * i.e, semicolon(;) separated values, where tokens are attribute, foreground and background respectively.<br> if any non-trailing token in value is null,
     * you still need to specify empty value. for example:
     * <pre>
     * DIM;;GREEN # foreground is not specified
     * </pre>
     */
    public Ansi(final String format) {
        final String[] tokens = format.split(";");
        // Each token is parsed best-effort: an unrecognized name is reported
        // and treated as "not specified" rather than failing construction.
        Ansi.Attribute attribute = null;
        try {
            if (tokens.length > 0 && tokens[0].length() > 0) {
                attribute = Ansi.Attribute.valueOf(tokens[0]);
            }
        }
        catch (IllegalArgumentException ex) {
            ex.printStackTrace();
        }
        Ansi.Color foreground = null;
        try {
            if (tokens.length > 1 && tokens[1].length() > 0) {
                foreground = Ansi.Color.valueOf(tokens[1]);
            }
        }
        catch (IllegalArgumentException e) {
            e.printStackTrace();
        }
        Ansi.Color background = null;
        try {
            if (tokens.length > 2 && tokens[2].length() > 0) {
                background = Ansi.Color.valueOf(tokens[2]);
            }
        }
        catch (IllegalArgumentException e) {
            e.printStackTrace();
        }
        init(attribute, AnsiColor.forStandardColor(foreground), AnsiColor.forStandardColor(background));
    }
    /**
     * Builds the "start" escape sequence from the given attribute and colors.
     * Standard colors use SGR 30-37 (fg) / 40-47 (bg); 256-palette colors use
     * the extended "38;5;n" / "48;5;n" forms.
     */
    private void init(final Attribute attr, final AnsiColor foreground, final AnsiColor background) {
        final StringBuilder buff = new StringBuilder();
        if (attr != null) {
            buff.append(attr);
        }
        if (foreground != null) {
            if (buff.length() > 0) {
                buff.append(SEPARATOR);
            }
            if (foreground.isStandardColor()) {
                buff.append(30 + foreground._standardColor.ordinal());
            }
            else {
                buff.append(38).append(SEPARATOR).append(XTERM_256_SEPARATOR).append(foreground._colorIndex);
            }
        }
        if (background != null) {
            if (buff.length() > 0) {
                buff.append(SEPARATOR);
            }
            if (background.isStandardColor()) {
                buff.append(40 + background._standardColor.ordinal());
            }
            else {
                buff.append(48).append(SEPARATOR).append(XTERM_256_SEPARATOR).append(background._colorIndex);
            }
        }
        buff.insert(0, PREFIX);
        buff.append(SUFFIX);
        start = buff.toString();
    }
    /**
     * The string representation of this object. This string will be the same that is expected by {@link #Ansi(String)}
     * <p>
     * NOTE(review): this re-parses the start sequence assuming only standard
     * colors (codes &lt; 48); an instance built with xterm-256 AnsiColors emits
     * "38;5;n"/"48;5;n" codes that this parsing does not handle — confirm
     * toString() is only used with standard colors.
     *
     * @return string representation of this object
     */
    @Override
    public String toString() {
        Attribute attr = null;
        Color foreground = null;
        Color background = null;
        for (final String token : start.substring(PREFIX.length(), start.length() - SUFFIX.length()).split(SEPARATOR)) {
            final int i = Integer.parseInt(token);
            if (i < 30) {
                // attribute codes are 0-8
                for (final Attribute value : Attribute.values()) {
                    if (value.toString().equals(token)) {
                        attr = value;
                        break;
                    }
                }
            }
            else if (i < 40) {
                foreground = Color.values()[i - 30];
            }
            else {
                background = Color.values()[i - 40];
            }
        }
        final StringBuilder buff = new StringBuilder();
        if (attr != null) {
            buff.append(attr.name());
        }
        buff.append(';');
        if (foreground != null) {
            buff.append(foreground.name());
        }
        buff.append(';');
        if (background != null) {
            buff.append(background.name());
        }
        // strip trailing semicolons so the result round-trips through Ansi(String)
        int end = buff.length() - 1;
        while (end >= 0 && buff.charAt(end) == ';') {
            end--;
        }
        return buff.substring(0, end + 1);
    }
    /**
     * Wraps given <code>message</code> with special ansi control sequences and returns it
     */
    public String colorize(final String message) {
        if (SUPPORTED) {
            final StringBuilder buff = new StringBuilder(start.length() + message.length() + END.length());
            buff.append(start).append(message).append(END);
            return buff.toString();
        }
        else {
            return message;
        }
    }
    /*-------------------------------------------------[ Printing ]---------------------------------------------------*/
    /**
     * Prints colorized {@code message} to specified {@code ps}. <p> if {@link #SUPPORTED} is false, it prints raw {@code message} to {@code ps}
     *
     * @param ps stream to print
     * @param message message to be colorized
     */
    public void print(final PrintStream ps, final String message) {
        if (SUPPORTED) {
            ps.print(start);
        }
        ps.print(message);
        if (SUPPORTED) {
            ps.print(END);
        }
    }
    /**
     * Prints colorized {@code message} to specified {@code ps} followed by newline. <p> if {@link #SUPPORTED} is false, it prints raw {@code message} to {@code
     * ps} followed by newline.
     *
     * @param ps stream to print
     * @param message message to be colorized
     */
    public void println(final PrintStream ps, final String message) {
        print(ps, message);
        ps.println();
    }
    /**
     * Prints formatted and colorized {@code message} to specified {@code ps}. <p> if {@link #SUPPORTED} is false, it prints formatted {@code message} to {@code
     * ps}
     *
     * @param ps stream to print
     * @param format A format string whose output to be colorized
     * @param args Arguments referenced by the format specifiers in the format
     */
    public void format(final PrintStream ps, final String format, final Object... args) {
        if (SUPPORTED) {
            ps.print(start);
        }
        ps.format(format, args);
        if (SUPPORTED) {
            ps.print(END);
        }
    }
    /*-------------------------------------------------[ System.out ]---------------------------------------------------*/
    /**
     * Prints colorized {@code message} to {@link System#out}
     *
     * @param message message to be colorized
     */
    public void out(final String message) {
        print(System.out, message);
    }
    /**
     * Prints colorized {@code message} to {@link System#out} followed by newline
     *
     * @param message message to be colorized
     */
    public void outLine(final String message) {
        println(System.out, message);
    }
    /**
     * Prints formatted and colorized {@code format} to {@link System#out}
     *
     * @param format A format string whose output to be colorized
     * @param args Arguments referenced by the format specifiers in the format
     */
    public void outFormat(final String format, final Object... args) {
        format(System.out, format, args);
    }
    /*-------------------------------------------------[ System.err ]---------------------------------------------------*/
    /**
     * Prints colorized {@code message} to {@link System#err}
     *
     * @param message message to be colorized
     */
    public void err(final String message) {
        print(System.err, message);
    }
    /**
     * Prints colorized {@code message} to {@link System#err} followed by newline
     *
     * @param message message to be colorized
     */
    public void errLine(final String message) {
        // BUG FIX: previously called print(...), so no newline was emitted
        // despite this method's documented contract (cf. outLine).
        println(System.err, message);
    }
    /**
     * Prints formatted and colorized {@code format} to {@link System#err}
     *
     * @param format A format string whose output to be colorized
     * @param args Arguments referenced by the format specifiers in the format
     */
    public void errFormat(final String format, final Object... args) {
        format(System.err, format, args);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.server.remoting.davex;
import junit.framework.TestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * <code>DiffParserTest</code>: unit tests for the DiffParser, covering value
 * parsing (quoted/unquoted, unicode, line separators), the four diff actions
 * (+ add, ^ set, - remove, &gt; move), separator-line handling, and rejection
 * of malformed diff strings.
 */
public class DiffParserTest extends TestCase {
    // A value of each simple type must be delivered to setProperty verbatim.
    public void testSetProperty() throws IOException, DiffException {
        ArrayList<String> l = new ArrayList<String>();
        l.add("\"simple string\"");
        l.add("2345");
        l.add("true");
        l.add("false");
        l.add("234.3455");
        l.add("null");
        for (final String value : l) {
            String diff = "^/a/prop : " + value;
            DummyDiffHandler handler = new DummyDiffHandler() {
                @Override
                public void setProperty(String targetPath, String diffValue) {
                    assertEquals(targetPath, "/a/prop");
                    assertEquals(value, diffValue);
                }
            };
            DiffParser parser = new DiffParser(handler);
            parser.parse(diff);
        }
    }
    // A missing value (empty or null) must surface as null or "" — never fail.
    public void testSetPropertyMissing() throws IOException,
            DiffException {
        ArrayList<String> l = new ArrayList<String>();
        l.add("");
        l.add(null);
        for (String value : l) {
            String diff = "^/a/prop : " + ((value == null) ? "" : value);
            DummyDiffHandler handler = new DummyDiffHandler() {
                @Override
                public void setProperty(String targetPath, String diffValue) {
                    assertEquals(targetPath, "/a/prop");
                    assertTrue(diffValue == null || "".equals(diffValue));
                }
            };
            DiffParser parser = new DiffParser(handler);
            parser.parse(diff);
        }
    }
    public void testSetPropertyWithUnicodeChars() throws IOException,
            DiffException {
        final String value = "\"String value containing \u2355\u8723 unicode chars.\"";
        String diff = "^/a/prop : " + value;
        DiffHandler handler = new DummyDiffHandler() {
            @Override
            public void setProperty(String targetPath, String diffValue) {
                assertEquals(targetPath, "/a/prop");
                assertEquals(value, diffValue);
            }
        };
        DiffParser parser = new DiffParser(handler);
        parser.parse(diff);
    }
    // Line separators inside a quoted value must be preserved, not treated
    // as command separators.
    public void testSetPropertyWithTrailingLineSep() throws IOException,
            DiffException {
        final String value = "\"String value ending with \r\r\n\n\r\n.\"";
        String diff = "^/a/prop : " + value;
        DiffHandler handler = new DummyDiffHandler() {
            @Override
            public void setProperty(String targetPath, String diffValue) {
                assertEquals(targetPath, "/a/prop");
                assertEquals(value, diffValue);
            }
        };
        DiffParser parser = new DiffParser(handler);
        parser.parse(diff);
    }
    // Escaped action chars (\r, \n as two-char sequences) inside an unquoted
    // value must pass through unchanged.
    public void testSetPropertyWithSpecialChar() throws IOException, DiffException {
        final String value = "+abc \\r+ \\n-ab >c \r\\r\\n+";
        String diff = "^/a/prop : " + value;
        DiffHandler handler = new DummyDiffHandler() {
            @Override
            public void setProperty(String targetPath, String diffValue) {
                assertEquals(targetPath, "/a/prop");
                assertEquals(value, diffValue);
            }
        };
        DiffParser parser = new DiffParser(handler);
        parser.parse(diff);
    }
    // An unterminated quoted string at end of input is still delivered as-is.
    public void testSetPropertyUnterminatedString() throws IOException,
            DiffException {
        final String value = "\"String value ending with \r\r\n\n\r\n.";
        String diff = "^/a/prop : " + value;
        DiffHandler handler = new DummyDiffHandler() {
            @Override
            public void setProperty(String targetPath, String diffValue) {
                assertEquals(targetPath, "/a/prop");
                assertEquals(value, diffValue);
            }
        };
        DiffParser parser = new DiffParser(handler);
        parser.parse(diff);
    }
    // Unescaped line separators followed by an action char start a new
    // command: ^ -> setProperty, + -> addNode, - -> remove, > -> move.
    public void testSetPropertyWithUnescapedAction() throws IOException,
            DiffException {
        String diff = "^abc : \r+def : \n-ghi : \r\n^jkl : \n\r>mno : \n";
        DiffHandler handler = new DummyDiffHandler() {
            @Override
            public void addNode(String targetPath, String diffValue) {
                assertEquals("def", targetPath);
                assertEquals("", diffValue);
            }
            @Override
            public void setProperty(String targetPath, String diffValue) {
                assertTrue("abc".equals(targetPath) || "jkl".equals(targetPath));
                assertEquals("", diffValue);
            }
            @Override
            public void remove(String targetPath, String diffValue) {
                assertEquals("ghi", targetPath);
                assertEquals("", diffValue);
            }
            @Override
            public void move(String targetPath, String diffValue) {
                assertEquals("mno", targetPath);
                assertEquals("\n", diffValue);
            }
        };
        DiffParser parser = new DiffParser(handler);
        parser.parse(diff);
    }
    // Each entry: {diff string, expected target path, expected value}.
    // NOTE(review): these diffs all start with '+', which (per
    // testSetPropertyWithUnescapedAction) maps to addNode, yet the handler
    // below only overrides setProperty — the assertions may never execute.
    // Verify against DiffParser whether this is intentional.
    public void testValidDiffs() throws IOException, DiffException {
        List<String[]> l = new ArrayList<String[]>();
        // unquoted string value
        l.add(new String[] {"+/a/b : 134", "/a/b","134"});
        l.add(new String[] {"+/a/b : 2.3", "/a/b","2.3"});
        l.add(new String[] {"+/a/b : true", "/a/b","true"});
        // quoted string value
        l.add(new String[] {"+/a/b : \"true\"", "/a/b","\"true\""});
        // NOTE(review): expected value below lacks the space before "unicode" —
        // looks like a typo in the fixture; confirm against the parser output.
        l.add(new String[] {"+/a/b : \"string value containing \u3456 unicode char.\"", "/a/b","\"string value containing \u3456unicode char.\""});
        // value consisting of quotes
        l.add(new String[] {"+/a/b : \"", "/a/b","\""});
        l.add(new String[] {"+/a/b : \"\"", "/a/b","\"\""});
        // value consisting of single
        l.add(new String[] {"+/a/b : '", "/a/b","'"});
        l.add(new String[] {"+/a/b : ''''", "/a/b","''''"});
        // value consisting of space(s) only
        l.add(new String[] {"+/a/b : ", "/a/b"," "});
        l.add(new String[] {"+/a/b : ", "/a/b"," "});
        // value consisting of line separators only
        l.add(new String[] {"+/a/b : \n", "/a/b","\n"});
        l.add(new String[] {"+/a/b : \r", "/a/b","\r"});
        l.add(new String[] {"+/a/b : \r\n", "/a/b","\r\n"});
        l.add(new String[] {"+/a/b : \r\n\n\r", "/a/b","\r\n\n\r"});
        // path containing white space
        l.add(new String[] {"+/a /b : 123", "/a /b","123"});
        l.add(new String[] {"+/a\r\t/b : 123", "/a\r\t/b","123"});
        // path having trailing white space
        l.add(new String[] {"+/a/b : 123", "/a/b","123"});
        l.add(new String[] {"+/a/b\r : 123", "/a/b\r","123"});
        l.add(new String[] {"+/a/b\r\n\n\r\n: 123", "/a/b\r\n\n\r\n","123"});
        // path containing reserved characters
        l.add(new String[] {"++abc+ : val", "+abc+","val"});
        l.add(new String[] {"++++++ : val", "+++++","val"});
        // value containing reserved characters
        l.add(new String[] {"+/a/b : +", "/a/b","+"});
        l.add(new String[] {"+/a/b : +->+-", "/a/b","+->+-"});
        l.add(new String[] {"+/a/b : \"+->+-\"", "/a/b","\"+->+-\""});
        // other white space than ' ' used as key-value separator
        l.add(new String[] {"+/a/b :\r123", "/a/b","123"});
        l.add(new String[] {"+/a/b\r: 123", "/a/b","123"});
        l.add(new String[] {"+/a/b\r:\r123", "/a/b","123"});
        l.add(new String[] {"+/a/b\r:\n123", "/a/b","123"});
        l.add(new String[] {"+/a/b\t:\r123", "/a/b","123"});
        l.add(new String[] {"+/a/b\t:\t123", "/a/b","123"});
        // path containing colon
        l.add(new String[] {"+/a:b/c:d : 123", "/a:b/c:d","123"});
        // value starting with colon -> ok
        l.add(new String[] {"+/a/b : : val", "/a/b",": val"});
        // missing value
        l.add(new String[] {"+/a/b : ", "/a/b", ""});
        l.add(new String[] {"+/a/b :\n", "/a/b", ""});
        for (final String[] strs : l) {
            DiffHandler hndl = new DummyDiffHandler() {
                @Override
                public void setProperty(String targetPath, String diffValue) {
                    assertEquals(strs[1], targetPath);
                    assertEquals(strs[2], diffValue);
                }
            };
            DiffParser parser = new DiffParser(hndl);
            parser.parse(strs[0]);
        }
        List<String> l2 = new ArrayList<String>();
        // multiple commands
        l2.add("+abc :\n\n+def : val");
        l2.add("+abc :\n\n+def : val\n");
        l2.add("+abc : \r+def : val");
        l2.add("+/a/b : val\r+abc : \r ");
        l2.add("+/a/b : val\r+abc :\n\n ");
        // missing value in the last action.
        l2.add("+/a/b : \r+abc :\n");
        l2.add("+/a/b : \\r+abc : abc\r\r+abc :\r");
        l2.add("+abc :\n\n+def : val\r\r>abc : ");
        for (String diff : l2) {
            final List<String> li = new ArrayList<String>();
            DiffHandler dh = new DummyDiffHandler() {
                @Override
                public void addNode(String targetPath, String diffValue) {
                    li.add(diffValue);
                }
            };
            DiffParser parser = new DiffParser(dh);
            parser.parse(diff);
            // each multi-command diff must yield exactly two addNode calls
            assertEquals(2, li.size());
        }
    }
    // Exercises how blank/separator lines between commands are interpreted.
    public void testSeparatorLines() throws IOException, DiffException {
        String diff = "+abc :\n\n+val : val";
        DiffHandler dh = new DummyDiffHandler() {
            @Override
            public void addNode(String targetPath, String diffValue) {
                if ("abc".equals(targetPath)) {
                    assertEquals("", diffValue);
                } else {
                    assertEquals("val", diffValue);
                }
            }
        };
        new DiffParser(dh).parse(diff);
        // single \n does NOT terminate the command: the "+val : val" text
        // stays part of the first value.
        diff = "+abc :\n+val : val";
        dh = new DummyDiffHandler() {
            @Override
            public void addNode(String targetPath, String diffValue) {
                assertEquals("+val : val", diffValue);
            }
        };
        new DiffParser(dh).parse(diff);
        // TODO: check again: currently all line separation chars before an diff-char are ignored unless they are escaped in way the handler understands (e.g. JSON does: \\r for \r).
        diff = "+abc :\r\r\r+def : val";
        dh = new DummyDiffHandler() {
            @Override
            public void addNode(String targetPath, String diffValue) {
                if ("abc".equals(targetPath)) {
                    assertEquals("", diffValue);
                } else {
                    assertEquals("val", diffValue);
                }
            }
        };
        new DiffParser(dh).parse(diff);
        diff = "+abc : val\r+def :\n\n ";
        dh = new DummyDiffHandler() {
            @Override
            public void addNode(String targetPath, String diffValue) {
                if ("abc".equals(targetPath)) {
                    assertEquals("val", diffValue);
                } else {
                    assertEquals("\n ", diffValue);
                }
            }
        };
        new DiffParser(dh).parse(diff);
    }
    // NOTE(review): Character.LINE_SEPARATOR is the Unicode general-category
    // constant (byte value 13), not U+2028 — so this actually inserts '\r'.
    // Confirm whether a real Unicode line separator was intended.
    public void testUnicodeLineSep() throws IOException, DiffException {
        String diff = "+abc : val" + new String(new byte[] {Character.LINE_SEPARATOR}, "utf-8") + "+abc : val";
        DiffHandler dh = new DummyDiffHandler() {
            @Override
            public void addNode(String targetPath, String diffValue) {
                assertEquals("abc", targetPath);
                assertEquals("val", diffValue);
            }
        };
        new DiffParser(dh).parse(diff);
    }
    // Every malformed diff below must raise a DiffException.
    public void testInvalidDiff() throws IOException, DiffException {
        List<String> l = new ArrayList<String>();
        l.add("");
        // path, separator and value missing
        l.add("+");
        l.add("+/a/b : val\r+");
        // path starting with white space, separator and value missing
        l.add("+\n");
        // separator and value missing
        l.add("+/a/b");
        l.add("+/a/b : val\r+abc\n");
        l.add("+/a/b :");
        // invalid for separator and value are missing (all : and white space
        // is interpreted as part of the path.
        l.add("+/a/b:");
        l.add("+/a/b:val");
        l.add("+/a/b: val");
        l.add("+/a/b:\rval");
        l.add("+/a/b :: val");
        // diff starting with white space
        l.add(" +/a/b: val");
        l.add("\r\r\r\r\r\r+/a/b: val");
        // key starting with white space
        l.add("+\r/a/b : 123");
        l.add("+ /a/b : 123");
        // key starting with colon
        l.add("+:/a/b : 123");
        for (String diff : l) {
            try {
                DiffParser parser = new DiffParser(new DummyDiffHandler());
                parser.parse(diff);
                fail(diff + " is not a valid diff string -> should throw DiffException.");
            } catch (DiffException e) {
                // ok
            }
        }
    }
    // No-op DiffHandler base: tests override only the callback under test.
    private class DummyDiffHandler implements DiffHandler {
        public void addNode(String targetPath, String diffValue)
                throws DiffException {
            // does nothing
        }
        public void setProperty(String targetPath, String diffValue)
                throws DiffException {
            // does nothing
        }
        public void remove(String targetPath, String diffValue)
                throws DiffException {
            // does nothing
        }
        public void move(String targetPath, String diffValue) throws DiffException {
            // does nothing
        }
    }
}
| |
package DataManagement;
import DataManagement.DataSets.DeviceDataSet;
import DataManagement.DataSets.SyncListDataSet;
import Misc.Logger.ADMLogger;
import UI.TableController;
import java.io.*;
import java.util.ArrayList;
import java.util.List;
/**
 * Manages the locally serialized copies of the mobile/tablet device data sets
 * and keeps them in sync with the remote spreadsheet (via SpreadSheetManager).
 */
public class LocalDataManager extends SpreadSheetManager{
    // Folder (relative to working directory) holding serialized data files.
    private final static String g_dataFolder = "datafiles/";
    private final static String g_mobileDeviceDataFile = "mobdevlist.dat";
    private final static String g_tabletDeviceDataFile = "tabdevlist.dat";
    public DeviceDataSet m_mobDeviceDataSet;
    public DeviceDataSet m_tabDeviceDataSet;
    private SyncListDataSet m_syncListDataSet;
    private SyncThread st;
    // Guard flag: true while a remote sync is rewriting the local data sets.
    private boolean areDevicesUpdating;
    public boolean areDevicesCurrentlyUpdating() {return areDevicesUpdating;}
    private TableController tableController; // needed in order to update table when devices are updated.
    /**
     * Loads the locally cached device data — creating and syncing a fresh
     * cache when none exists — then brings it up to date with the remote.
     *
     * @throws IOException if the remote spreadsheet cannot be reached
     */
    public LocalDataManager() throws IOException {
        super();
        /*ADMLogger.LogMessage("User name: " + getUserName());
        ADMLogger.LogMessage("User surname: " + getUserSurname());
        ADMLogger.LogMessage("User full name: " + getUserFullName());
        ADMLogger.LogMessage("User email: " + getUserEmail());
        ADMLogger.LogMessage("User handle: " + getUserHandle());*/
        areDevicesUpdating = false;
        m_mobDeviceDataSet = new DeviceDataSet();
        m_tabDeviceDataSet = new DeviceDataSet();
        m_syncListDataSet = new SyncListDataSet(-1);
        m_syncListDataSet = getFullSyncList();
        try {
            m_mobDeviceDataSet = loadDataFile(g_mobileDeviceDataFile);
            m_tabDeviceDataSet = loadDataFile(g_tabletDeviceDataFile);
        }
        catch(IOException e){
            // First run (or deleted cache): rebuild from the remote source.
            ADMLogger.LogWarning("File not found. Creating and syncing new data.");
            resetDeviceDataSet();
        }
        syncLocalData();
        st = new SyncThread(this);
        //updateSyncList(114, "TEST_FIELD");
        ADMLogger.LogMessage("Finished.");
    }
    // NOTE(review): finalize() is deprecated and not guaranteed to run; an
    // explicit shutdown/save hook would be more reliable. Kept for
    // compatibility with existing behavior.
    @Override
    protected void finalize() throws Throwable {
        saveDataFile(g_mobileDeviceDataFile, m_mobDeviceDataSet);
        saveDataFile(g_tabletDeviceDataFile, m_tabDeviceDataSet);
        super.finalize();
    }
    /**
     * Pushes a locally modified device to the remote sheet, waiting (by
     * polling) until any in-progress remote sync has finished.
     *
     * @param locallyUpdatedDevice the device whose row should be rewritten
     */
    public void updateSingleDevice(DeviceDataNode locallyUpdatedDevice){
        m_syncListDataSet.setSyncListTimestamp(locallyUpdatedDevice);
        // Busy-wait until the background sync releases the data sets.
        while(areDevicesUpdating){
            try{
                Thread.sleep(1000);
            }
            catch(InterruptedException interex){
                ADMLogger.LogError("Interrupted Exception was thrown...");
            }
        }
        updateDeviceInSheet(locallyUpdatedDevice, locallyUpdatedDevice.rowID);
    }
    /**
     * Starts the background sync thread; the given controller is notified
     * whenever remote changes are pulled in.
     */
    public void enableAutoUpdate(TableController tableController){
        this.tableController = tableController;
        ADMLogger.LogMessage("Auto update has been enabled.");
        st.start();
    }
    /** Interrupts the background sync thread and waits for it to terminate. */
    public void disableAutoUpdate(){
        ADMLogger.LogMessage("Auto update has been disabled.");
        st.interrupt();
        try {
            st.join();
            ADMLogger.LogMessage("Auto update thread has joined main thread successfully.");
        } catch (InterruptedException e) {
            e.printStackTrace();
            ADMLogger.LogError("Auto update thread failed to join main thread...");
        }
    }
    /**
     * Pulls remote changes into the local data sets and refreshes the table
     * view when anything changed. Sets {@code areDevicesUpdating} for the
     * duration so writers wait (see updateSingleDevice).
     */
    public void syncLocalDataFromRemote(){
        try{
            areDevicesUpdating = true;
            boolean areThereChanges = syncLocalData();
            if(areThereChanges){
                tableController.resetDeviceList(this); // sets new list
                tableController.resetDevicesForView(); // renders new devices
                // TODO: currently, this resets whole lists so if user has any devices filtered he wont see them filtered anymore.
                // This behaviour is not expected and should be fixed asap.
                // call filltable in table manager
            }
        }
        catch (IOException e){
            e.printStackTrace();
            ADMLogger.LogMessage("\nSyncinc has failed. Postponing....");
        }
        catch (NullPointerException pointerE){
            pointerE.printStackTrace();
            ADMLogger.LogMessage("\nNull was found. (pls check if tableController was set correctly)");
        }
        finally{
            areDevicesUpdating = false;
        }
    }
    /**
     * Compares local devices against the remote sync list and re-fetches any
     * stale rows. Changed sets are re-serialized to disk.
     *
     * @return true if either the mobile or tablet set was modified
     * @throws IOException on remote access failure
     */
    private boolean syncLocalData() throws IOException{
        // Row index runs across BOTH loops: tablet rows follow mobile rows
        // in the same sheet (presumably — verify against the sheet layout).
        int i = 1; // row index
        boolean mob_hasChanged = false;
        // if something did change - save mobile data
        // same goes for tablets below
        m_syncListDataSet = getFullSyncList();
        for(DeviceDataNode dnode : m_mobDeviceDataSet.getData()){
            if(dnode != null){
                if(!m_syncListDataSet.isUpToDate(dnode)){
                    dnode = getDeviceFromRow(i + 1);
                    // Note for myself:
                    // dnode now points to the new object
                    m_syncListDataSet.setDeviceTimestamp(dnode);
                    m_mobDeviceDataSet.replaceDataNode(
                            dnode.getDevicePublicID(),
                            dnode
                    );
                    mob_hasChanged = true;
                    ADMLogger.LogMessage("[LOCAL] Device with ID: " + dnode.getDevicePublicID() + " was updated.");
                }
                i++;
            }
        }
        boolean tab_hasChanged = false;
        for(DeviceDataNode dnode : m_tabDeviceDataSet.getData()){
            if(dnode != null){
                if(!m_syncListDataSet.isUpToDate(dnode)){
                    dnode = getDeviceFromRow(i + 1);
                    m_syncListDataSet.setDeviceTimestamp(dnode);
                    // Tablet public IDs are offset by 5000 in the data set.
                    m_tabDeviceDataSet.replaceDataNode(
                            dnode.getDevicePublicID() - 5000,
                            dnode
                    );
                    tab_hasChanged = true;
                    ADMLogger.LogMessage("[LOCAL] Device with ID: " + dnode.getDevicePublicID() + " was updated.");
                }
                i++;
            }
        }
        // Maybe add settings - autosave. Autosave will enable this saving.
        // If disabled, save on exit.
        if(mob_hasChanged){
            saveDataFile(g_mobileDeviceDataFile, m_mobDeviceDataSet);
        }
        else{
            ADMLogger.LogMessage("Mobile data is up to date.");
        }
        if(tab_hasChanged){
            saveDataFile(g_tabletDeviceDataFile, m_tabDeviceDataSet);
        }
        else{
            ADMLogger.LogMessage("Tablet data is up to date.");
        }
        return mob_hasChanged || tab_hasChanged;
    }
    /**
     * Assigns the current user to the device with the given unique ID
     * (searching mobiles first, then tablets) and pushes the change.
     */
    public void updateDeviceUser(String uniqueID){
        DeviceDataNode deviceToUpdate = null;
        deviceToUpdate = m_mobDeviceDataSet.getDeviceByUniqueID(uniqueID);
        if(deviceToUpdate == null) {
            deviceToUpdate = m_tabDeviceDataSet.getDeviceByUniqueID(uniqueID);
        }
        if(deviceToUpdate != null){
            deviceToUpdate.setNewDeviceUser(getUserFullName() + " (" + getUserHandle() + ")");
            updateSingleDevice(deviceToUpdate);
        }
        else{
            ADMLogger.LogError("Device does not exist in database?? [unique ID: " + uniqueID + "]");
        }
    }
    /** Assigns the current user to the given device and pushes the change. */
    public void updateDeviceUser(DeviceDataNode deviceToUpdate){
        deviceToUpdate.setNewDeviceUser(getUserFullName() + " (" + getUserHandle() + ")");
        updateSingleDevice(deviceToUpdate);
    }
    /**
     * @return all non-null devices from both sets, mobiles first, as an array
     */
    public DeviceDataNode[] joinTwoSetsIntoArray(){
        List<DeviceDataNode> listresult = new ArrayList<>();
        for(DeviceDataNode ddn : m_mobDeviceDataSet.getData()){
            if(ddn != null){
                listresult.add(ddn);
            }
        }
        for(DeviceDataNode ddn : m_tabDeviceDataSet.getData()){
            if(ddn != null){
                listresult.add(ddn);
            }
        }
        DeviceDataNode[] finalresult = new DeviceDataNode[listresult.size()];
        finalresult = listresult.toArray(finalresult);
        return finalresult;
    }
    /**
     * Rebuilds both device sets from the remote sheet, stamps every device in
     * the sync list, and serializes the fresh sets to disk.
     */
    private void resetDeviceDataSet() throws IOException {
        DeviceDataSet[] dset = getFullDeviceDataSets();
        m_mobDeviceDataSet = dset[0];
        m_tabDeviceDataSet = dset[1];
        for(DeviceDataNode dnode : m_mobDeviceDataSet.getData()){
            if(dnode != null){
                ADMLogger.LogMessage("Attempting to setDeviceTimestamp for row ID: " + dnode.rowID);
                m_syncListDataSet.setDeviceTimestamp(dnode);
            }
        }
        for(DeviceDataNode dnode : m_tabDeviceDataSet.getData()){
            if(dnode != null){
                m_syncListDataSet.setDeviceTimestamp(dnode);
            }
        }
        saveDataFile(g_mobileDeviceDataFile, m_mobDeviceDataSet);
        saveDataFile(g_tabletDeviceDataFile, m_tabDeviceDataSet);
    }
    /**
     * Deserializes one data file from the local data folder.
     *
     * @param dataFileName file name relative to {@code g_dataFolder}
     * @return the deserialized object, or {@code null} if its class is unknown
     * @throws IOException if the file is missing or unreadable
     */
    @SuppressWarnings("unchecked") // caller chooses T to match the stored object
    private <T> T loadDataFile(String dataFileName)
            throws IOException {
        // FIX: try-with-resources closes both streams even when readObject
        // (or the ObjectInputStream constructor) throws — the previous code
        // leaked them on any failure path.
        try (FileInputStream fileInputStream = new FileInputStream(g_dataFolder + dataFileName);
             ObjectInputStream in = new ObjectInputStream(fileInputStream)) {
            return (T) in.readObject();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            return null;
        }
    }
    /**
     * Serializes {@code data} into the local data folder, creating the folder
     * on first use. I/O failures are logged, not propagated (best effort).
     *
     * @param dataFileName file name relative to {@code g_dataFolder}
     * @param data         the object to persist
     */
    private void saveDataFile(String dataFileName, java.io.Serializable data){
        try {
            java.io.File dataFolderDir = new File(g_dataFolder);
            if(!dataFolderDir.exists()){
                boolean successful = dataFolderDir.mkdir();
                if (successful)
                {
                    ADMLogger.LogMessage("Directory was created successfully");
                }
                else
                {
                    throw new IOException("Failed to create new dir for serialized data.");
                }
            }
            // FIX: try-with-resources closes both streams even if writeObject
            // throws mid-serialization.
            try (FileOutputStream fileOut = new FileOutputStream(g_dataFolder + dataFileName);
                 ObjectOutputStream out = new ObjectOutputStream(fileOut)) {
                out.writeObject(data);
            }
            ADMLogger.LogMessage("Serialized data is saved in " + g_dataFolder + dataFileName);
        }
        catch(IOException ex){
            ex.printStackTrace();
        }
    }
}
| |
/*
* Copyright 2009-present, Stephen Colebourne
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.joda.money;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.fail;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Modifier;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.Arrays;
import java.util.Collections;
import org.junit.Test;
import org.junit.runner.RunWith;
import com.tngtech.java.junit.dataprovider.DataProvider;
import com.tngtech.java.junit.dataprovider.DataProviderRunner;
import com.tngtech.java.junit.dataprovider.UseDataProvider;
/**
* Test Money.
*/
@RunWith(DataProviderRunner.class)
public class TestMoney {
private static final CurrencyUnit GBP = CurrencyUnit.of("GBP");
private static final CurrencyUnit EUR = CurrencyUnit.of("EUR");
private static final CurrencyUnit USD = CurrencyUnit.of("USD");
private static final CurrencyUnit JPY = CurrencyUnit.of("JPY");
private static final BigDecimal BIGDEC_2_3 = new BigDecimal("2.3");
private static final BigDecimal BIGDEC_2_34 = new BigDecimal("2.34");
private static final BigDecimal BIGDEC_2_345 = new BigDecimal("2.345");
private static final BigDecimal BIGDEC_M5_78 = new BigDecimal("-5.78");
private static final Money GBP_0_00 = Money.parse("GBP 0.00");
private static final Money GBP_1_23 = Money.parse("GBP 1.23");
private static final Money GBP_2_33 = Money.parse("GBP 2.33");
private static final Money GBP_2_34 = Money.parse("GBP 2.34");
private static final Money GBP_2_35 = Money.parse("GBP 2.35");
private static final Money GBP_2_36 = Money.parse("GBP 2.36");
private static final Money GBP_5_78 = Money.parse("GBP 5.78");
private static final Money GBP_M1_23 = Money.parse("GBP -1.23");
private static final Money GBP_M5_78 = Money.parse("GBP -5.78");
private static final Money GBP_INT_MAX_PLUS1 = Money.ofMinor(GBP, ((long) Integer.MAX_VALUE) + 1);
private static final Money GBP_INT_MIN_MINUS1 = Money.ofMinor(GBP, ((long) Integer.MIN_VALUE) - 1);
private static final Money GBP_INT_MAX_MAJOR_PLUS1 = Money.ofMinor(GBP, (((long) Integer.MAX_VALUE) + 1) * 100);
private static final Money GBP_INT_MIN_MAJOR_MINUS1 = Money.ofMinor(GBP, (((long) Integer.MIN_VALUE) - 1) * 100);
private static final Money GBP_LONG_MAX_PLUS1 = Money.of(GBP, BigDecimal.valueOf(Long.MAX_VALUE).add(BigDecimal.ONE));
private static final Money GBP_LONG_MIN_MINUS1 =
Money.of(GBP, BigDecimal.valueOf(Long.MIN_VALUE).subtract(BigDecimal.ONE));
private static final Money GBP_LONG_MAX_MAJOR_PLUS1 = Money.of(GBP,
BigDecimal.valueOf(Long.MAX_VALUE).add(BigDecimal.ONE).multiply(BigDecimal.valueOf(100)));
private static final Money GBP_LONG_MIN_MAJOR_MINUS1 = Money.of(GBP,
BigDecimal.valueOf(Long.MIN_VALUE).subtract(BigDecimal.ONE).multiply(BigDecimal.valueOf(100)));
private static final Money JPY_423 = Money.parse("JPY 423");
private static final Money USD_1_23 = Money.parse("USD 1.23");
private static final Money USD_2_34 = Money.parse("USD 2.34");
private static final Money USD_2_35 = Money.parse("USD 2.35");
    //-----------------------------------------------------------------------
    // of(Currency,BigDecimal)
    //-----------------------------------------------------------------------
    @Test
    public void test_factory_of_Currency_BigDecimal() {
        Money test = Money.of(GBP, BIGDEC_2_34);
        assertEquals(GBP, test.getCurrencyUnit());
        assertEquals(234, test.getAmountMinorInt());
        assertEquals(2, test.getAmount().scale());
    }
    @Test
    public void test_factory_of_Currency_BigDecimal_correctScale() {
        // 2.3 must be widened to the currency scale of 2 (i.e. 2.30).
        Money test = Money.of(GBP, BIGDEC_2_3);
        assertEquals(GBP, test.getCurrencyUnit());
        assertEquals(230, test.getAmountMinorInt());
        assertEquals(2, test.getAmount().scale());
    }
    @Test(expected = ArithmeticException.class)
    public void test_factory_of_Currency_BigDecimal_invalidScaleGBP() {
        // 2.345 needs rounding for GBP (scale 2) -> ArithmeticException
        Money.of(GBP, BIGDEC_2_345);
    }
    @Test(expected = ArithmeticException.class)
    public void test_factory_of_Currency_BigDecimal_invalidScaleJPY() {
        // 2.3 needs rounding for JPY (scale 0) -> ArithmeticException
        Money.of(JPY, BIGDEC_2_3);
    }
    @Test(expected = NullPointerException.class)
    public void test_factory_of_Currency_BigDecimal_nullCurrency() {
        Money.of((CurrencyUnit) null, BIGDEC_2_34);
    }
    @Test(expected = NullPointerException.class)
    public void test_factory_of_Currency_BigDecimal_nullBigDecimal() {
        Money.of(GBP, (BigDecimal) null);
    }
    //-----------------------------------------------------------------------
    // of(Currency,BigDecimal,RoundingMode)
    //-----------------------------------------------------------------------
    @Test
    public void test_factory_of_Currency_BigDecimal_GBP_RoundingMode_DOWN() {
        Money test = Money.of(GBP, BIGDEC_2_34, RoundingMode.DOWN);
        assertEquals(GBP, test.getCurrencyUnit());
        assertEquals(234, test.getAmountMinorInt());
        assertEquals(2, test.getAmount().scale());
    }
    @Test
    public void test_factory_of_Currency_BigDecimal_JPY_RoundingMode_DOWN() {
        // JPY has scale 0, so 2.34 rounds DOWN to 2 whole yen.
        Money test = Money.of(JPY, BIGDEC_2_34, RoundingMode.DOWN);
        assertEquals(JPY, test.getCurrencyUnit());
        assertEquals(2, test.getAmountMinorInt());
        assertEquals(0, test.getAmount().scale());
    }
    @Test
    public void test_factory_of_Currency_BigDecimal_JPY_RoundingMode_UP() {
        Money test = Money.of(JPY, BIGDEC_2_34, RoundingMode.UP);
        assertEquals(JPY, test.getCurrencyUnit());
        assertEquals(3, test.getAmountMinorInt());
        assertEquals(0, test.getAmount().scale());
    }
    @Test(expected = ArithmeticException.class)
    public void test_factory_of_Currency_BigDecimal_RoundingMode_UNNECESSARY() {
        Money.of(JPY, BIGDEC_2_34, RoundingMode.UNNECESSARY);
    }
    @Test(expected = NullPointerException.class)
    public void test_factory_of_Currency_BigDecimal_RoundingMode_nullCurrency() {
        Money.of((CurrencyUnit) null, BIGDEC_2_34, RoundingMode.DOWN);
    }
    @Test(expected = NullPointerException.class)
    public void test_factory_of_Currency_BigDecimal_RoundingMode_nullBigDecimal() {
        Money.of(GBP, (BigDecimal) null, RoundingMode.DOWN);
    }
    @Test(expected = NullPointerException.class)
    public void test_factory_of_Currency_BigDecimal_RoundingMode_nullRoundingMode() {
        Money.of(GBP, BIGDEC_2_34, (RoundingMode) null);
    }
    //-----------------------------------------------------------------------
    // of(Currency,double)
    //-----------------------------------------------------------------------
    @Test
    public void test_factory_of_Currency_double() {
        Money test = Money.of(GBP, 2.34d);
        assertEquals(GBP, test.getCurrencyUnit());
        assertEquals(234, test.getAmountMinorInt());
        assertEquals(2, test.getScale());
    }
    @Test
    public void test_factory_of_Currency_double_correctScale() {
        Money test = Money.of(GBP, 2.3d);
        assertEquals(GBP, test.getCurrencyUnit());
        assertEquals(230, test.getAmountMinorInt());
        assertEquals(2, test.getScale());
    }
    @Test
    public void test_factory_of_Currency_double_trailingZero1() {
        // Trailing zeros in the double literal must not inflate the scale.
        Money test = Money.of(GBP, 1.230d);
        assertEquals(GBP, test.getCurrencyUnit());
        assertEquals(BigDecimal.valueOf(123L, 2), test.getAmount());
        assertEquals(2, test.getScale());
    }
    @Test
    public void test_factory_of_Currency_double_trailingZero2() {
        Money test = Money.of(GBP, 1.20d);
        assertEquals(GBP, test.getCurrencyUnit());
        assertEquals(BigDecimal.valueOf(120L, 2), test.getAmount());
        assertEquals(2, test.getScale());
    }
    @Test
    public void test_factory_of_Currency_double_medium() {
        Money test = Money.of(GBP, 2000d);
        assertEquals(GBP, test.getCurrencyUnit());
        assertEquals(BigDecimal.valueOf(200000L, 2), test.getAmount());
        assertEquals(2, test.getScale());
    }
    @Test
    public void test_factory_of_Currency_double_big() {
        Money test = Money.of(GBP, 200000000d);
        assertEquals(GBP, test.getCurrencyUnit());
        assertEquals(BigDecimal.valueOf(20000000000L, 2), test.getAmount());
        assertEquals(2, test.getScale());
    }
    @Test(expected = ArithmeticException.class)
    public void test_factory_of_Currency_double_invalidScaleGBP() {
        Money.of(GBP, 2.345d);
    }
    @Test(expected = ArithmeticException.class)
    public void test_factory_of_Currency_double_invalidScaleJPY() {
        Money.of(JPY, 2.3d);
    }
@Test(expected = NullPointerException.class)
public void test_factory_of_Currency_double_nullCurrency() {
Money.of((CurrencyUnit) null, BIGDEC_2_34);
}
    //-----------------------------------------------------------------------
    // of(Currency,double,RoundingMode)
    //-----------------------------------------------------------------------
    @Test
    public void test_factory_of_Currency_double_GBP_RoundingMode_DOWN() {
        Money test = Money.of(GBP, 2.34d, RoundingMode.DOWN);
        assertEquals(GBP, test.getCurrencyUnit());
        assertEquals(234, test.getAmountMinorInt());
        assertEquals(2, test.getAmount().scale());
    }
    @Test
    public void test_factory_of_Currency_double_JPY_RoundingMode_DOWN() {
        // JPY (scale 0): 2.34 rounds DOWN to 2.
        Money test = Money.of(JPY, 2.34d, RoundingMode.DOWN);
        assertEquals(JPY, test.getCurrencyUnit());
        assertEquals(2, test.getAmountMinorInt());
        assertEquals(0, test.getAmount().scale());
    }
    @Test
    public void test_factory_of_Currency_double_JPY_RoundingMode_UP() {
        Money test = Money.of(JPY, 2.34d, RoundingMode.UP);
        assertEquals(JPY, test.getCurrencyUnit());
        assertEquals(3, test.getAmountMinorInt());
        assertEquals(0, test.getAmount().scale());
    }
    @Test(expected = ArithmeticException.class)
    public void test_factory_of_Currency_double_RoundingMode_UNNECESSARY() {
        Money.of(JPY, 2.34d, RoundingMode.UNNECESSARY);
    }
    @Test(expected = NullPointerException.class)
    public void test_factory_of_Currency_double_RoundingMode_nullCurrency() {
        Money.of((CurrencyUnit) null, 2.34d, RoundingMode.DOWN);
    }
    @Test(expected = NullPointerException.class)
    public void test_factory_of_Currency_double_RoundingMode_nullRoundingMode() {
        Money.of(GBP, 2.34d, (RoundingMode) null);
    }
    //-----------------------------------------------------------------------
    // ofMajor(Currency,long)
    //-----------------------------------------------------------------------
    @Test
    public void test_factory_ofMajor_Currency_long() {
        // 234 major units of GBP = 23400 pence at scale 2.
        Money test = Money.ofMajor(GBP, 234);
        assertEquals(GBP, test.getCurrencyUnit());
        assertEquals(23400, test.getAmountMinorInt());
        assertEquals(2, test.getAmount().scale());
    }
    @Test(expected = NullPointerException.class)
    public void test_factory_ofMajor_Currency_long_nullCurrency() {
        Money.ofMajor((CurrencyUnit) null, 234);
    }
    //-----------------------------------------------------------------------
    // ofMinor(Currency,long)
    //-----------------------------------------------------------------------
    @Test
    public void test_factory_ofMinor_Currency_long() {
        Money test = Money.ofMinor(GBP, 234);
        assertEquals(GBP, test.getCurrencyUnit());
        assertEquals(234, test.getAmountMinorInt());
        assertEquals(2, test.getAmount().scale());
    }
    @Test(expected = NullPointerException.class)
    public void test_factory_ofMinor_Currency_long_nullCurrency() {
        Money.ofMinor((CurrencyUnit) null, 234);
    }
    //-----------------------------------------------------------------------
    // zero(Currency)
    //-----------------------------------------------------------------------
    @Test
    public void test_factory_zero_Currency() {
        Money test = Money.zero(GBP);
        assertEquals(GBP, test.getCurrencyUnit());
        assertEquals(0, test.getAmountMinorInt());
        assertEquals(2, test.getAmount().scale());
    }
    @Test(expected = NullPointerException.class)
    public void test_factory_zero_Currency_nullCurrency() {
        Money.zero((CurrencyUnit) null);
    }
    //-----------------------------------------------------------------------
    // from(BigMoneyProvider)
    //-----------------------------------------------------------------------
    @Test
    public void test_factory_from_BigMoneyProvider() {
        Money test = Money.of(BigMoney.parse("GBP 104.23"));
        assertEquals(GBP, test.getCurrencyUnit());
        assertEquals(10423, test.getAmountMinorInt());
        assertEquals(2, test.getAmount().scale());
    }
    @Test
    public void test_factory_from_BigMoneyProvider_fixScale() {
        // Scale 1 input is widened to the GBP scale of 2 without rounding.
        Money test = Money.of(BigMoney.parse("GBP 104.2"));
        assertEquals(GBP, test.getCurrencyUnit());
        assertEquals(10420, test.getAmountMinorInt());
        assertEquals(2, test.getAmount().scale());
    }
    @Test(expected = ArithmeticException.class)
    public void test_factory_from_BigMoneyProvider_invalidCurrencyScale() {
        Money.of(BigMoney.parse("GBP 104.235"));
    }
    @Test(expected = NullPointerException.class)
    public void test_factory_from_BigMoneyProvider_nullBigMoneyProvider() {
        Money.of((BigMoneyProvider) null);
    }
    //-----------------------------------------------------------------------
    // from(BigMoneyProvider,RoundingMode)
    //-----------------------------------------------------------------------
    @Test
    public void test_factory_from_BigMoneyProvider_RoundingMode() {
        Money test = Money.of(BigMoney.parse("GBP 104.235"), RoundingMode.HALF_EVEN);
        assertEquals(GBP, test.getCurrencyUnit());
        assertEquals(10424, test.getAmountMinorInt());
        assertEquals(2, test.getAmount().scale());
    }
    @Test(expected = NullPointerException.class)
    public void test_factory_from_BigMoneyProvider_RoundingMode_nullBigMoneyProvider() {
        Money.of((BigMoneyProvider) null, RoundingMode.DOWN);
    }
    @Test(expected = NullPointerException.class)
    public void test_factory_from_BigMoneyProvider_RoundingMode_nullRoundingMode() {
        Money.of(BigMoney.parse("GBP 104.235"), (RoundingMode) null);
    }
//-----------------------------------------------------------------------
// total(Money...)
//-----------------------------------------------------------------------
@Test
public void test_factory_total_varargs_1() {
    // A single argument totals to itself.
    Money sum = Money.total(GBP_1_23);
    assertEquals(GBP, sum.getCurrencyUnit());
    assertEquals(123, sum.getAmountMinorInt());
}

@Test
public void test_factory_total_array_1() {
    Money[] monies = new Money[] {GBP_1_23};
    Money sum = Money.total(monies);
    assertEquals(GBP, sum.getCurrencyUnit());
    assertEquals(123, sum.getAmountMinorInt());
}

@Test
public void test_factory_total_varargs_3() {
    // 1.23 + 2.33 + 2.36 = 5.92
    Money sum = Money.total(GBP_1_23, GBP_2_33, GBP_2_36);
    assertEquals(GBP, sum.getCurrencyUnit());
    assertEquals(592, sum.getAmountMinorInt());
}

@Test
public void test_factory_total_array_3() {
    Money[] monies = new Money[] {GBP_1_23, GBP_2_33, GBP_2_36};
    Money sum = Money.total(monies);
    assertEquals(GBP, sum.getCurrencyUnit());
    assertEquals(592, sum.getAmountMinorInt());
}

@Test(expected = IllegalArgumentException.class)
public void test_factory_total_varargs_empty() {
    // No currency can be inferred from an empty argument list.
    Money.total();
}

@Test(expected = IllegalArgumentException.class)
public void test_factory_total_array_empty() {
    Money.total(new Money[0]);
}

@Test(expected = CurrencyMismatchException.class)
public void test_factory_total_varargs_currenciesDiffer() {
    try {
        Money.total(GBP_2_33, JPY_423);
    } catch (CurrencyMismatchException mismatch) {
        // The exception reports both currencies involved.
        assertEquals(GBP, mismatch.getFirstCurrency());
        assertEquals(JPY, mismatch.getSecondCurrency());
        throw mismatch;
    }
}

@Test(expected = CurrencyMismatchException.class)
public void test_factory_total_array_currenciesDiffer() {
    Money[] monies = new Money[] {GBP_2_33, JPY_423};
    try {
        Money.total(monies);
    } catch (CurrencyMismatchException mismatch) {
        assertEquals(GBP, mismatch.getFirstCurrency());
        assertEquals(JPY, mismatch.getSecondCurrency());
        throw mismatch;
    }
}

@Test(expected = NullPointerException.class)
public void test_factory_total_varargs_nullFirst() {
    Money.total((Money) null, GBP_2_33, GBP_2_36);
}

@Test(expected = NullPointerException.class)
public void test_factory_total_array_nullFirst() {
    Money.total(new Money[] {null, GBP_2_33, GBP_2_36});
}

@Test(expected = NullPointerException.class)
public void test_factory_total_varargs_nullNotFirst() {
    Money.total(GBP_2_33, null, GBP_2_36);
}

@Test(expected = NullPointerException.class)
public void test_factory_total_array_nullNotFirst() {
    Money.total(new Money[] {GBP_2_33, null, GBP_2_36});
}
//-----------------------------------------------------------------------
// total(Iterable)
//-----------------------------------------------------------------------
@Test
public void test_factory_total_Iterable() {
    // 1.23 + 2.33 + 2.36 = 5.92
    Iterable<Money> monies = Arrays.asList(GBP_1_23, GBP_2_33, GBP_2_36);
    Money sum = Money.total(monies);
    assertEquals(GBP, sum.getCurrencyUnit());
    assertEquals(592, sum.getAmountMinorInt());
}

@Test(expected = IllegalArgumentException.class)
public void test_factory_total_Iterable_empty() {
    // No currency can be inferred from an empty iterable.
    Iterable<Money> monies = Collections.emptyList();
    Money.total(monies);
}

@Test(expected = CurrencyMismatchException.class)
public void test_factory_total_Iterable_currenciesDiffer() {
    Iterable<Money> monies = Arrays.asList(GBP_2_33, JPY_423);
    try {
        Money.total(monies);
    } catch (CurrencyMismatchException mismatch) {
        assertEquals(GBP, mismatch.getFirstCurrency());
        assertEquals(JPY, mismatch.getSecondCurrency());
        throw mismatch;
    }
}

@Test(expected = NullPointerException.class)
public void test_factory_total_Iterable_nullFirst() {
    Iterable<Money> monies = Arrays.asList(null, GBP_2_33, GBP_2_36);
    Money.total(monies);
}

@Test(expected = NullPointerException.class)
public void test_factory_total_Iterable_nullNotFirst() {
    Iterable<Money> monies = Arrays.asList(GBP_2_33, null, GBP_2_36);
    Money.total(monies);
}
//-----------------------------------------------------------------------
// total(CurrencyUnit,Money...)
//-----------------------------------------------------------------------
@Test
public void test_factory_total_CurrencyUnitVarargs_1() {
    Money sum = Money.total(GBP, GBP_1_23);
    assertEquals(GBP, sum.getCurrencyUnit());
    assertEquals(123, sum.getAmountMinorInt());
}

@Test
public void test_factory_total_CurrencyUnitArray_1() {
    Money[] monies = new Money[] {GBP_1_23};
    Money sum = Money.total(GBP, monies);
    assertEquals(GBP, sum.getCurrencyUnit());
    assertEquals(123, sum.getAmountMinorInt());
}

@Test
public void test_factory_total_CurrencyUnitVarargs_3() {
    // 1.23 + 2.33 + 2.36 = 5.92
    Money sum = Money.total(GBP, GBP_1_23, GBP_2_33, GBP_2_36);
    assertEquals(GBP, sum.getCurrencyUnit());
    assertEquals(592, sum.getAmountMinorInt());
}

@Test
public void test_factory_total_CurrencyUnitArray_3() {
    Money[] monies = new Money[] {GBP_1_23, GBP_2_33, GBP_2_36};
    Money sum = Money.total(GBP, monies);
    assertEquals(GBP, sum.getCurrencyUnit());
    assertEquals(592, sum.getAmountMinorInt());
}

@Test
public void test_factory_total_CurrencyUnitVarargs_empty() {
    // With an explicit currency, an empty total is simply zero.
    Money sum = Money.total(GBP);
    assertEquals(GBP, sum.getCurrencyUnit());
    assertEquals(0, sum.getAmountMinorInt());
}

@Test
public void test_factory_total_CurrencyUnitArray_empty() {
    Money sum = Money.total(GBP, new Money[0]);
    assertEquals(GBP, sum.getCurrencyUnit());
    assertEquals(0, sum.getAmountMinorInt());
}

@Test(expected = CurrencyMismatchException.class)
public void test_factory_total_CurrencyUnitVarargs_currenciesDiffer() {
    try {
        Money.total(GBP, JPY_423);
    } catch (CurrencyMismatchException mismatch) {
        // The stated currency is reported first, the offender second.
        assertEquals(GBP, mismatch.getFirstCurrency());
        assertEquals(JPY, mismatch.getSecondCurrency());
        throw mismatch;
    }
}

@Test(expected = CurrencyMismatchException.class)
public void test_factory_total_CurrencyUnitArray_currenciesDiffer() {
    Money[] monies = new Money[] {JPY_423};
    try {
        Money.total(GBP, monies);
    } catch (CurrencyMismatchException mismatch) {
        assertEquals(GBP, mismatch.getFirstCurrency());
        assertEquals(JPY, mismatch.getSecondCurrency());
        throw mismatch;
    }
}

@Test(expected = CurrencyMismatchException.class)
public void test_factory_total_CurrencyUnitVarargs_currenciesDifferInArray() {
    try {
        Money.total(GBP, GBP_2_33, JPY_423);
    } catch (CurrencyMismatchException mismatch) {
        assertEquals(GBP, mismatch.getFirstCurrency());
        assertEquals(JPY, mismatch.getSecondCurrency());
        throw mismatch;
    }
}

@Test(expected = CurrencyMismatchException.class)
public void test_factory_total_CurrencyUnitArray_currenciesDifferInArray() {
    Money[] monies = new Money[] {GBP_2_33, JPY_423};
    try {
        Money.total(GBP, monies);
    } catch (CurrencyMismatchException mismatch) {
        assertEquals(GBP, mismatch.getFirstCurrency());
        assertEquals(JPY, mismatch.getSecondCurrency());
        throw mismatch;
    }
}

@Test(expected = NullPointerException.class)
public void test_factory_total_CurrencyUnitVarargs_nullFirst() {
    Money.total(GBP, null, GBP_2_33, GBP_2_36);
}

@Test(expected = NullPointerException.class)
public void test_factory_total_CurrencyUnitArray_nullFirst() {
    Money.total(GBP, new Money[] {null, GBP_2_33, GBP_2_36});
}

@Test(expected = NullPointerException.class)
public void test_factory_total_CurrencyUnitVarargs_nullNotFirst() {
    Money.total(GBP, GBP_2_33, null, GBP_2_36);
}

@Test(expected = NullPointerException.class)
public void test_factory_total_CurrencyUnitArray_nullNotFirst() {
    Money.total(GBP, new Money[] {GBP_2_33, null, GBP_2_36});
}
//-----------------------------------------------------------------------
// total(CurrencyUnit,Iterable)
//-----------------------------------------------------------------------
@Test
public void test_factory_total_CurrencyUnitIterable() {
    // 1.23 + 2.33 + 2.36 = 5.92
    Iterable<Money> monies = Arrays.asList(GBP_1_23, GBP_2_33, GBP_2_36);
    Money sum = Money.total(GBP, monies);
    assertEquals(GBP, sum.getCurrencyUnit());
    assertEquals(592, sum.getAmountMinorInt());
}

@Test
public void test_factory_total_CurrencyUnitIterable_empty() {
    // With an explicit currency, an empty iterable totals to zero.
    Iterable<Money> monies = Collections.emptyList();
    Money sum = Money.total(GBP, monies);
    assertEquals(GBP, sum.getCurrencyUnit());
    assertEquals(0, sum.getAmountMinorInt());
}

@Test(expected = CurrencyMismatchException.class)
public void test_factory_total_CurrencyUnitIterable_currenciesDiffer() {
    Iterable<Money> monies = Arrays.asList(JPY_423);
    try {
        Money.total(GBP, monies);
    } catch (CurrencyMismatchException mismatch) {
        assertEquals(GBP, mismatch.getFirstCurrency());
        assertEquals(JPY, mismatch.getSecondCurrency());
        throw mismatch;
    }
}

@Test(expected = CurrencyMismatchException.class)
public void test_factory_total_CurrencyUnitIterable_currenciesDifferInIterable() {
    Iterable<Money> monies = Arrays.asList(GBP_2_33, JPY_423);
    try {
        Money.total(GBP, monies);
    } catch (CurrencyMismatchException mismatch) {
        assertEquals(GBP, mismatch.getFirstCurrency());
        assertEquals(JPY, mismatch.getSecondCurrency());
        throw mismatch;
    }
}

@Test(expected = NullPointerException.class)
public void test_factory_total_CurrencyUnitIterable_nullFirst() {
    Iterable<Money> monies = Arrays.asList(null, GBP_2_33, GBP_2_36);
    Money.total(GBP, monies);
}

@Test(expected = NullPointerException.class)
public void test_factory_total_CurrencyUnitIterable_nullNotFirst() {
    Iterable<Money> monies = Arrays.asList(GBP_2_33, null, GBP_2_36);
    Money.total(GBP, monies);
}
//-----------------------------------------------------------------------
// parse(String)
//-----------------------------------------------------------------------
@DataProvider
public static Object[][] data_parse() {
    // Each row: input string, expected currency, expected amount in minor units.
    return new Object[][] {
        {"GBP 2.43", GBP, 243},
        {"GBP +12.57", GBP, 1257},      // explicit plus sign accepted
        {"GBP -5.87", GBP, -587},
        {"GBP 0.99", GBP, 99},
        {"GBP .99", GBP, 99},           // leading zero optional
        {"GBP +.99", GBP, 99},
        {"GBP +0.99", GBP, 99},
        {"GBP -.99", GBP, -99},
        {"GBP -0.99", GBP, -99},
        {"GBP 0", GBP, 0},
        {"GBP 2", GBP, 200},            // whole amounts widened to currency scale
        {"GBP 123.", GBP, 12300},       // trailing decimal point accepted
        {"GBP3", GBP, 300},             // separator between code and amount optional
        {"GBP3.10", GBP, 310},
        {"GBP 3.10", GBP, 310},
        // NOTE(review): the next two rows look identical to the previous one; in
        // upstream Joda-Money these rows use distinct Unicode space characters
        // (e.g. NBSP) that may have been lost in transcription -- confirm.
        {"GBP 3.10", GBP, 310},
        {"GBP 3.10", GBP, 310},
    };
}

@Test
@UseDataProvider("data_parse")
public void test_factory_parse(String str, CurrencyUnit currency, int amount) {
    // Parses the string and checks both currency and minor-unit amount.
    Money test = Money.parse(str);
    assertEquals(currency, test.getCurrencyUnit());
    assertEquals(amount, test.getAmountMinorInt());
}

@Test(expected = IllegalArgumentException.class)
public void test_factory_parse_String_tooShort() {
    // A currency code alone (with trailing space) is not a valid money string.
    Money.parse("GBP ");
}

@Test(expected = IllegalArgumentException.class)
public void test_factory_parse_String_badCurrency() {
    // GBX is not a registered currency code.
    Money.parse("GBX 2.34");
}

@Test(expected = NullPointerException.class)
public void test_factory_parse_String_nullString() {
    Money.parse((String) null);
}
//-----------------------------------------------------------------------
// constructor
//-----------------------------------------------------------------------
@Test
public void test_constructor_null1() throws Exception {
    // The Money(BigMoney) constructor must not be part of the public API,
    // and must reject a null argument.
    Constructor<Money> con = Money.class.getDeclaredConstructor(BigMoney.class);
    assertEquals(false, Modifier.isPublic(con.getModifiers()));
    assertEquals(false, Modifier.isProtected(con.getModifiers()));
    try {
        con.setAccessible(true);
        con.newInstance(new Object[] { null });
        fail();
    } catch (InvocationTargetException ex) {
        // The cause is AssertionError -- presumably the constructor guards with
        // an 'assert' statement, so this test requires assertions enabled (-ea).
        assertEquals(AssertionError.class, ex.getCause().getClass());
    }
}

@Test
public void test_constructor_scale() throws Exception {
    // The constructor must also reject a BigMoney whose scale does not match
    // the currency (BIGDEC_2_3 has scale 1, GBP requires 2).
    Constructor<Money> con = Money.class.getDeclaredConstructor(BigMoney.class);
    try {
        con.setAccessible(true);
        con.newInstance(new Object[] { BigMoney.of(GBP, BIGDEC_2_3) });
        fail();
    } catch (InvocationTargetException ex) {
        // Same assert-based guard as above; requires -ea.
        assertEquals(AssertionError.class, ex.getCause().getClass());
    }
}
//-----------------------------------------------------------------------
// serialization
//-----------------------------------------------------------------------
@Test
public void test_serialization() throws Exception {
    // Round-trips a Money instance and checks it deserializes equal.
    Money a = GBP_2_34;
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (ObjectOutputStream oos = new ObjectOutputStream(baos)) {
        oos.writeObject(a);
    }
    // Fixed: the input stream was previously never closed; the redundant
    // oos.close() inside try-with-resources was also removed.
    try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray()))) {
        Money input = (Money) ois.readObject();
        assertEquals(a, input);
    }
}

@Test(expected = InvalidObjectException.class)
public void test_serialization_invalidNumericCode() throws Exception {
    // A GBP unit with a wrong numeric code (234 instead of 826) must be
    // rejected during deserialization validation.
    CurrencyUnit cu = new CurrencyUnit("GBP", (short) 234, (short) 2);
    Money m = Money.of(cu, 123.43d);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (ObjectOutputStream oos = new ObjectOutputStream(baos)) {
        oos.writeObject(m);
    }
    try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray()))) {
        ois.readObject();
    }
}

@Test(expected = InvalidObjectException.class)
public void test_serialization_invalidDecimalPlaces() throws Exception {
    // A GBP unit with wrong decimal places (3 instead of 2) must likewise be
    // rejected during deserialization validation.
    CurrencyUnit cu = new CurrencyUnit("GBP", (short) 826, (short) 3);
    Money m = Money.of(cu, 123.43d);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (ObjectOutputStream oos = new ObjectOutputStream(baos)) {
        oos.writeObject(m);
    }
    try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray()))) {
        ois.readObject();
    }
}
//-----------------------------------------------------------------------
// getCurrencyUnit()
//-----------------------------------------------------------------------
@Test
public void test_getCurrencyUnit_GBP() {
    assertEquals(GBP, GBP_2_34.getCurrencyUnit());
}

@Test
public void test_getCurrencyUnit_EUR() {
    // The currency survives parsing of a negative amount.
    assertEquals(EUR, Money.parse("EUR -5.78").getCurrencyUnit());
}

//-----------------------------------------------------------------------
// withCurrencyUnit(Currency)
//-----------------------------------------------------------------------
@Test
public void test_withCurrencyUnit_Currency() {
    // Changing the currency keeps the amount unchanged when scales match.
    Money changed = GBP_2_34.withCurrencyUnit(USD);
    assertEquals("USD 2.34", changed.toString());
}

@Test
public void test_withCurrencyUnit_Currency_same() {
    // Same currency returns the identical immutable instance.
    Money changed = GBP_2_34.withCurrencyUnit(GBP);
    assertSame(GBP_2_34, changed);
}

@Test(expected = ArithmeticException.class)
public void test_withCurrencyUnit_Currency_scaleProblem() {
    // JPY has no decimal places, so 2.34 cannot be represented without rounding.
    GBP_2_34.withCurrencyUnit(JPY);
}

@Test(expected = NullPointerException.class)
public void test_withCurrencyUnit_Currency_nullCurrency() {
    GBP_2_34.withCurrencyUnit((CurrencyUnit) null);
}
//-----------------------------------------------------------------------
// withCurrencyUnit(Currency,RoundingMode)
//-----------------------------------------------------------------------
@Test
public void test_withCurrencyUnit_CurrencyRoundingMode_DOWN() {
    // GBP 2.34 truncates to JPY 2 (JPY has zero decimal places).
    Money test = GBP_2_34.withCurrencyUnit(JPY, RoundingMode.DOWN);
    assertEquals("JPY 2", test.toString());
}

@Test
public void test_withCurrencyUnit_CurrencyRoundingMode_UP() {
    Money test = GBP_2_34.withCurrencyUnit(JPY, RoundingMode.UP);
    assertEquals("JPY 3", test.toString());
}

@Test
public void test_withCurrencyUnit_CurrencyRoundingMode_same() {
    // Same currency returns the identical immutable instance.
    Money test = GBP_2_34.withCurrencyUnit(GBP, RoundingMode.DOWN);
    assertSame(GBP_2_34, test);
}

// Fixed: method name typo (was ..._UNECESSARY). JUnit discovers tests by
// annotation, so the rename is safe for all callers.
@Test(expected = ArithmeticException.class)
public void test_withCurrencyUnit_CurrencyRoundingMode_UNNECESSARY() {
    GBP_2_34.withCurrencyUnit(JPY, RoundingMode.UNNECESSARY);
}

@Test(expected = NullPointerException.class)
public void test_withCurrencyUnit_CurrencyRoundingMode_nullCurrency() {
    GBP_2_34.withCurrencyUnit((CurrencyUnit) null, RoundingMode.UNNECESSARY);
}
//-----------------------------------------------------------------------
// getScale()
//-----------------------------------------------------------------------
@Test
public void test_getScale_GBP() {
    // GBP uses 2 decimal places.
    assertEquals(2, GBP_2_34.getScale());
}
@Test
public void test_getScale_JPY() {
    // JPY uses 0 decimal places.
    assertEquals(0, JPY_423.getScale());
}
//-----------------------------------------------------------------------
// getAmount()
//-----------------------------------------------------------------------
@Test
public void test_getAmount_positive() {
    assertEquals(BIGDEC_2_34, GBP_2_34.getAmount());
}
@Test
public void test_getAmount_negative() {
    assertEquals(BIGDEC_M5_78, GBP_M5_78.getAmount());
}
//-----------------------------------------------------------------------
// getAmountMajor()
//-----------------------------------------------------------------------
@Test
public void test_getAmountMajor_positive() {
    // Major part of 2.34 is 2 (truncation toward zero).
    assertEquals(BigDecimal.valueOf(2), GBP_2_34.getAmountMajor());
}
@Test
public void test_getAmountMajor_negative() {
    // Major part of -5.78 is -5 (truncation toward zero).
    assertEquals(BigDecimal.valueOf(-5), GBP_M5_78.getAmountMajor());
}
//-----------------------------------------------------------------------
// getAmountMajorLong()
//-----------------------------------------------------------------------
@Test
public void test_getAmountMajorLong_positive() {
    assertEquals(2L, GBP_2_34.getAmountMajorLong());
}
@Test
public void test_getAmountMajorLong_negative() {
    assertEquals(-5L, GBP_M5_78.getAmountMajorLong());
}
@Test(expected = ArithmeticException.class)
public void test_getAmountMajorLong_tooBigPositive() {
    // Overflow past Long.MAX_VALUE must fail loudly, not wrap.
    GBP_LONG_MAX_MAJOR_PLUS1.getAmountMajorLong();
}
@Test(expected = ArithmeticException.class)
public void test_getAmountMajorLong_tooBigNegative() {
    GBP_LONG_MIN_MAJOR_MINUS1.getAmountMajorLong();
}
//-----------------------------------------------------------------------
// getAmountMajorInt()
//-----------------------------------------------------------------------
@Test
public void test_getAmountMajorInt_positive() {
    assertEquals(2, GBP_2_34.getAmountMajorInt());
}
@Test
public void test_getAmountMajorInt_negative() {
    assertEquals(-5, GBP_M5_78.getAmountMajorInt());
}
@Test(expected = ArithmeticException.class)
public void test_getAmountMajorInt_tooBigPositive() {
    // Overflow past Integer.MAX_VALUE must fail loudly, not wrap.
    GBP_INT_MAX_MAJOR_PLUS1.getAmountMajorInt();
}
@Test(expected = ArithmeticException.class)
public void test_getAmountMajorInt_tooBigNegative() {
    GBP_INT_MIN_MAJOR_MINUS1.getAmountMajorInt();
}
//-----------------------------------------------------------------------
// getAmountMinor()
//-----------------------------------------------------------------------
@Test
public void test_getAmountMinor_positive() {
    // 2.34 GBP is 234 minor units (pence).
    assertEquals(BigDecimal.valueOf(234), GBP_2_34.getAmountMinor());
}
@Test
public void test_getAmountMinor_negative() {
    assertEquals(BigDecimal.valueOf(-578), GBP_M5_78.getAmountMinor());
}
//-----------------------------------------------------------------------
// getAmountMinorLong()
//-----------------------------------------------------------------------
@Test
public void test_getAmountMinorLong_positive() {
    assertEquals(234L, GBP_2_34.getAmountMinorLong());
}
@Test
public void test_getAmountMinorLong_negative() {
    assertEquals(-578L, GBP_M5_78.getAmountMinorLong());
}
@Test(expected = ArithmeticException.class)
public void test_getAmountMinorLong_tooBigPositive() {
    GBP_LONG_MAX_PLUS1.getAmountMinorLong();
}
@Test(expected = ArithmeticException.class)
public void test_getAmountMinorLong_tooBigNegative() {
    GBP_LONG_MIN_MINUS1.getAmountMinorLong();
}
//-----------------------------------------------------------------------
// getAmountMinorInt()
//-----------------------------------------------------------------------
@Test
public void test_getAmountMinorInt_positive() {
    assertEquals(234, GBP_2_34.getAmountMinorInt());
}
@Test
public void test_getAmountMinorInt_negative() {
    assertEquals(-578, GBP_M5_78.getAmountMinorInt());
}
@Test(expected = ArithmeticException.class)
public void test_getAmountMinorInt_tooBigPositive() {
    GBP_INT_MAX_PLUS1.getAmountMinorInt();
}
@Test(expected = ArithmeticException.class)
public void test_getAmountMinorInt_tooBigNegative() {
    GBP_INT_MIN_MINUS1.getAmountMinorInt();
}
//-----------------------------------------------------------------------
// getMinorPart()
//-----------------------------------------------------------------------
@Test
public void test_getMinorPart_positive() {
    // Minor part of 2.34 is the fractional 34 pence.
    assertEquals(34, GBP_2_34.getMinorPart());
}
@Test
public void test_getMinorPart_negative() {
    // The minor part keeps the sign of the amount.
    assertEquals(-78, GBP_M5_78.getMinorPart());
}
//-----------------------------------------------------------------------
// isZero()
//-----------------------------------------------------------------------
@Test
public void test_isZero() {
    // Only the exact zero amount reports true.
    assertEquals(true, GBP_0_00.isZero());
    assertEquals(false, GBP_2_34.isZero());
    assertEquals(false, GBP_M5_78.isZero());
}
//-----------------------------------------------------------------------
// isPositive()
//-----------------------------------------------------------------------
@Test
public void test_isPositive() {
    // Strictly positive: zero is excluded.
    assertEquals(false, GBP_0_00.isPositive());
    assertEquals(true, GBP_2_34.isPositive());
    assertEquals(false, GBP_M5_78.isPositive());
}
//-----------------------------------------------------------------------
// isPositiveOrZero()
//-----------------------------------------------------------------------
@Test
public void test_isPositiveOrZero() {
    assertEquals(true, GBP_0_00.isPositiveOrZero());
    assertEquals(true, GBP_2_34.isPositiveOrZero());
    assertEquals(false, GBP_M5_78.isPositiveOrZero());
}
//-----------------------------------------------------------------------
// isNegative()
//-----------------------------------------------------------------------
@Test
public void test_isNegative() {
    // Strictly negative: zero is excluded.
    assertEquals(false, GBP_0_00.isNegative());
    assertEquals(false, GBP_2_34.isNegative());
    assertEquals(true, GBP_M5_78.isNegative());
}
//-----------------------------------------------------------------------
// isNegativeOrZero()
//-----------------------------------------------------------------------
@Test
public void test_isNegativeOrZero() {
    assertEquals(true, GBP_0_00.isNegativeOrZero());
    assertEquals(false, GBP_2_34.isNegativeOrZero());
    assertEquals(true, GBP_M5_78.isNegativeOrZero());
}
//-----------------------------------------------------------------------
// withAmount(BigDecimal)
//-----------------------------------------------------------------------
@Test
public void test_withAmount_BigDecimal() {
    // The currency is kept, the amount replaced.
    Money updated = GBP_2_34.withAmount(BIGDEC_M5_78);
    assertEquals("GBP -5.78", updated.toString());
}

@Test
public void test_withAmount_BigDecimal_same() {
    // An equal amount returns the identical immutable instance.
    Money updated = GBP_2_34.withAmount(BIGDEC_2_34);
    assertSame(GBP_2_34, updated);
}

@Test(expected = ArithmeticException.class)
public void test_withAmount_BigDecimal_invalidScale() {
    // Three decimal places exceed GBP's scale without a rounding mode.
    GBP_2_34.withAmount(new BigDecimal("2.345"));
}

@Test(expected = NullPointerException.class)
public void test_withAmount_BigDecimal_nullBigDecimal() {
    GBP_2_34.withAmount((BigDecimal) null);
}

//-----------------------------------------------------------------------
// withAmount(BigDecimal,RoundingMode)
//-----------------------------------------------------------------------
@Test
public void test_withAmount_BigDecimalRoundingMode() {
    Money updated = GBP_2_34.withAmount(BIGDEC_M5_78, RoundingMode.UNNECESSARY);
    assertEquals("GBP -5.78", updated.toString());
}

@Test
public void test_withAmount_BigDecimalRoundingMode_same() {
    Money updated = GBP_2_34.withAmount(BIGDEC_2_34, RoundingMode.UNNECESSARY);
    assertSame(GBP_2_34, updated);
}

@Test
public void test_withAmount_BigDecimalRoundingMode_roundDown() {
    // 2.355 truncates down to 2.35.
    Money updated = GBP_2_34.withAmount(new BigDecimal("2.355"), RoundingMode.DOWN);
    assertEquals(GBP_2_35, updated);
}

@Test(expected = ArithmeticException.class)
public void test_withAmount_BigDecimalRoundingMode_roundUnecessary() {
    GBP_2_34.withAmount(new BigDecimal("2.345"), RoundingMode.UNNECESSARY);
}

@Test(expected = NullPointerException.class)
public void test_withAmount_BigDecimalRoundingMode_nullBigDecimal() {
    GBP_2_34.withAmount((BigDecimal) null, RoundingMode.UNNECESSARY);
}

@Test(expected = NullPointerException.class)
public void test_withAmount_BigDecimalRoundingMode_nullRoundingMode() {
    GBP_2_34.withAmount(BIGDEC_2_34, (RoundingMode) null);
}
//-----------------------------------------------------------------------
// withAmount(double)
//-----------------------------------------------------------------------
@Test
public void test_withAmount_double() {
    // The currency is kept, the amount replaced.
    Money updated = GBP_2_34.withAmount(-5.78d);
    assertEquals("GBP -5.78", updated.toString());
}

@Test
public void test_withAmount_double_same() {
    // An equal amount returns the identical immutable instance.
    Money updated = GBP_2_34.withAmount(2.34d);
    assertSame(GBP_2_34, updated);
}

@Test(expected = ArithmeticException.class)
public void test_withAmount_double_invalidScale() {
    // Three decimal places exceed GBP's scale without a rounding mode.
    GBP_2_34.withAmount(2.345d);
}
//-----------------------------------------------------------------------
// withAmount(double,RoundingMode)
//-----------------------------------------------------------------------
@Test
public void test_withAmount_doubleRoundingMode() {
    Money test = GBP_2_34.withAmount(-5.78d, RoundingMode.UNNECESSARY);
    assertEquals("GBP -5.78", test.toString());
}

@Test
public void test_withAmount_doubleRoundingMode_same() {
    // An equal amount returns the identical immutable instance.
    Money test = GBP_2_34.withAmount(2.34d, RoundingMode.UNNECESSARY);
    assertSame(GBP_2_34, test);
}

@Test
public void test_withAmount_doubleRoundingMode_roundDown() {
    // 2.355 truncates down to 2.35.
    Money test = GBP_2_34.withAmount(2.355d, RoundingMode.DOWN);
    assertEquals(GBP_2_35, test);
}

// Fixed: method name typo (was ..._roundUnecessary); JUnit discovers tests
// by annotation, so the rename is safe.
@Test(expected = ArithmeticException.class)
public void test_withAmount_doubleRoundingMode_roundUnnecessary() {
    GBP_2_34.withAmount(2.345d, RoundingMode.UNNECESSARY);
}

@Test(expected = NullPointerException.class)
public void test_withAmount_doubleRoundingMode_nullRoundingMode() {
    // Fixed: previously passed BIGDEC_2_34, which invoked the BigDecimal
    // overload and left the double overload's null check untested.
    GBP_2_34.withAmount(2.34d, (RoundingMode) null);
}
//-----------------------------------------------------------------------
// plus(Iterable)
//-----------------------------------------------------------------------
@Test
public void test_plus_Iterable() {
    // 2.34 + 2.33 + 1.23 = 5.90
    Iterable<Money> monies = Arrays.asList(GBP_2_33, GBP_1_23);
    Money result = GBP_2_34.plus(monies);
    assertEquals("GBP 5.90", result.toString());
}

@Test
public void test_plus_Iterable_zero() {
    // Adding only zero returns the identical immutable instance.
    Iterable<Money> monies = Arrays.asList(GBP_0_00);
    Money result = GBP_2_34.plus(monies);
    assertSame(GBP_2_34, result);
}

@Test(expected = CurrencyMismatchException.class)
public void test_plus_Iterable_currencyMismatch() {
    Iterable<Money> monies = Arrays.asList(GBP_2_33, JPY_423);
    try {
        GBP_M5_78.plus(monies);
    } catch (CurrencyMismatchException mismatch) {
        assertEquals(GBP, mismatch.getFirstCurrency());
        assertEquals(JPY, mismatch.getSecondCurrency());
        throw mismatch;
    }
}

@Test(expected = NullPointerException.class)
public void test_plus_Iterable_nullEntry() {
    Iterable<Money> monies = Arrays.asList(GBP_2_33, null);
    GBP_M5_78.plus(monies);
}

@Test(expected = NullPointerException.class)
public void test_plus_Iterable_nullIterable() {
    GBP_M5_78.plus((Iterable<Money>) null);
}
//-----------------------------------------------------------------------
// plus(Money)
//-----------------------------------------------------------------------
@Test
public void test_plus_Money_zero() {
    // Adding zero returns the identical immutable instance.
    Money result = GBP_2_34.plus(GBP_0_00);
    assertSame(GBP_2_34, result);
}

@Test
public void test_plus_Money_positive() {
    // 2.34 + 1.23 = 3.57
    Money result = GBP_2_34.plus(GBP_1_23);
    assertEquals("GBP 3.57", result.toString());
}

@Test
public void test_plus_Money_negative() {
    // 2.34 + (-1.23) = 1.11
    Money result = GBP_2_34.plus(GBP_M1_23);
    assertEquals("GBP 1.11", result.toString());
}

@Test(expected = CurrencyMismatchException.class)
public void test_plus_Money_currencyMismatch() {
    try {
        GBP_M5_78.plus(USD_1_23);
    } catch (CurrencyMismatchException mismatch) {
        assertEquals(GBP, mismatch.getFirstCurrency());
        assertEquals(USD, mismatch.getSecondCurrency());
        throw mismatch;
    }
}

@Test(expected = NullPointerException.class)
public void test_plus_Money_nullMoney() {
    GBP_M5_78.plus((Money) null);
}
//-----------------------------------------------------------------------
// plus(BigDecimal)
//-----------------------------------------------------------------------
@Test
public void test_plus_BigDecimal_zero() {
    // Adding zero returns the identical immutable instance.
    Money result = GBP_2_34.plus(BigDecimal.ZERO);
    assertSame(GBP_2_34, result);
}

@Test
public void test_plus_BigDecimal_positive() {
    Money result = GBP_2_34.plus(new BigDecimal("1.23"));
    assertEquals("GBP 3.57", result.toString());
}

@Test
public void test_plus_BigDecimal_negative() {
    Money result = GBP_2_34.plus(new BigDecimal("-1.23"));
    assertEquals("GBP 1.11", result.toString());
}

@Test(expected = ArithmeticException.class)
public void test_plus_BigDecimal_invalidScale() {
    // Three decimal places exceed GBP's scale without a rounding mode.
    GBP_2_34.plus(new BigDecimal("1.235"));
}

@Test(expected = NullPointerException.class)
public void test_plus_BigDecimal_nullBigDecimal() {
    GBP_M5_78.plus((BigDecimal) null);
}
//-----------------------------------------------------------------------
// plus(BigDecimal,RoundingMode)
//-----------------------------------------------------------------------
@Test
public void test_plus_BigDecimalRoundingMode_zero() {
    // Adding zero returns the identical immutable instance.
    Money result = GBP_2_34.plus(BigDecimal.ZERO, RoundingMode.UNNECESSARY);
    assertSame(GBP_2_34, result);
}

@Test
public void test_plus_BigDecimalRoundingMode_positive() {
    Money result = GBP_2_34.plus(new BigDecimal("1.23"), RoundingMode.UNNECESSARY);
    assertEquals("GBP 3.57", result.toString());
}

@Test
public void test_plus_BigDecimalRoundingMode_negative() {
    Money result = GBP_2_34.plus(new BigDecimal("-1.23"), RoundingMode.UNNECESSARY);
    assertEquals("GBP 1.11", result.toString());
}

@Test
public void test_plus_BigDecimalRoundingMode_roundDown() {
    // 2.34 + 1.235 = 3.575, truncated down to 3.57.
    Money result = GBP_2_34.plus(new BigDecimal("1.235"), RoundingMode.DOWN);
    assertEquals("GBP 3.57", result.toString());
}

@Test(expected = ArithmeticException.class)
public void test_plus_BigDecimalRoundingMode_roundUnecessary() {
    GBP_2_34.plus(new BigDecimal("1.235"), RoundingMode.UNNECESSARY);
}

@Test(expected = NullPointerException.class)
public void test_plus_BigDecimalRoundingMode_nullBigDecimal() {
    GBP_M5_78.plus((BigDecimal) null, RoundingMode.UNNECESSARY);
}

@Test(expected = NullPointerException.class)
public void test_plus_BigDecimalRoundingMode_nullRoundingMode() {
    GBP_M5_78.plus(BIGDEC_2_34, (RoundingMode) null);
}
//-----------------------------------------------------------------------
// plus(double)
//-----------------------------------------------------------------------
@Test
public void test_plus_double_zero() {
    // Adding zero returns the identical immutable instance.
    Money result = GBP_2_34.plus(0d);
    assertSame(GBP_2_34, result);
}

@Test
public void test_plus_double_positive() {
    Money result = GBP_2_34.plus(1.23d);
    assertEquals("GBP 3.57", result.toString());
}

@Test
public void test_plus_double_negative() {
    Money result = GBP_2_34.plus(-1.23d);
    assertEquals("GBP 1.11", result.toString());
}

@Test(expected = ArithmeticException.class)
public void test_plus_double_invalidScale() {
    // Three decimal places exceed GBP's scale without a rounding mode.
    GBP_2_34.plus(1.235d);
}
//-----------------------------------------------------------------------
// plus(double,RoundingMode)
//-----------------------------------------------------------------------
@Test
public void test_plus_doubleRoundingMode_zero() {
    // Adding zero returns the identical immutable instance.
    Money result = GBP_2_34.plus(0d, RoundingMode.UNNECESSARY);
    assertSame(GBP_2_34, result);
}

@Test
public void test_plus_doubleRoundingMode_positive() {
    Money result = GBP_2_34.plus(1.23d, RoundingMode.UNNECESSARY);
    assertEquals("GBP 3.57", result.toString());
}

@Test
public void test_plus_doubleRoundingMode_negative() {
    Money result = GBP_2_34.plus(-1.23d, RoundingMode.UNNECESSARY);
    assertEquals("GBP 1.11", result.toString());
}

@Test
public void test_plus_doubleRoundingMode_roundDown() {
    // 2.34 + 1.235 = 3.575, truncated down to 3.57.
    Money result = GBP_2_34.plus(1.235d, RoundingMode.DOWN);
    assertEquals("GBP 3.57", result.toString());
}

@Test(expected = ArithmeticException.class)
public void test_plus_doubleRoundingMode_roundUnecessary() {
    GBP_2_34.plus(1.235d, RoundingMode.UNNECESSARY);
}

@Test(expected = NullPointerException.class)
public void test_plus_doubleRoundingMode_nullRoundingMode() {
    GBP_M5_78.plus(2.34d, (RoundingMode) null);
}
//-----------------------------------------------------------------------
// plusMajor(long)
//-----------------------------------------------------------------------
@Test
public void test_plusMajor_zero() {
    // Adding zero major units returns the identical immutable instance.
    Money result = GBP_2_34.plusMajor(0);
    assertSame(GBP_2_34, result);
}

@Test
public void test_plusMajor_positive() {
    // 2.34 + 123 whole pounds = 125.34
    Money result = GBP_2_34.plusMajor(123);
    assertEquals("GBP 125.34", result.toString());
}

@Test
public void test_plusMajor_negative() {
    // 2.34 - 123 whole pounds = -120.66
    Money result = GBP_2_34.plusMajor(-123);
    assertEquals("GBP -120.66", result.toString());
}

//-----------------------------------------------------------------------
// plusMinor(long)
//-----------------------------------------------------------------------
@Test
public void test_plusMinor_zero() {
    // Adding zero minor units returns the identical immutable instance.
    Money result = GBP_2_34.plusMinor(0);
    assertSame(GBP_2_34, result);
}

@Test
public void test_plusMinor_positive() {
    // 2.34 + 123 pence = 3.57
    Money result = GBP_2_34.plusMinor(123);
    assertEquals("GBP 3.57", result.toString());
}

@Test
public void test_plusMinor_negative() {
    // 2.34 - 123 pence = 1.11
    Money result = GBP_2_34.plusMinor(-123);
    assertEquals("GBP 1.11", result.toString());
}
//-----------------------------------------------------------------------
// minus(Iterable)
//-----------------------------------------------------------------------
@Test
public void test_minus_Iterable() {
// Subtracts every element: 2.34 - 2.33 - 1.23 = -1.22.
Iterable<Money> iterable = Arrays.asList(GBP_2_33, GBP_1_23);
Money test = GBP_2_34.minus(iterable);
assertEquals("GBP -1.22", test.toString());
}
@Test
public void test_minus_Iterable_zero() {
// Subtracting only zero is a no-op; the same instance is returned.
Iterable<Money> iterable = Arrays.asList(GBP_0_00);
Money test = GBP_2_34.minus(iterable);
assertSame(GBP_2_34, test);
}
@Test(expected = CurrencyMismatchException.class)
public void test_minus_Iterable_currencyMismatch() {
// Catch-and-rethrow pattern: verify the exception reports both currencies,
// then rethrow so @Test(expected=...) still sees it.
try {
Iterable<Money> iterable = Arrays.asList(GBP_2_33, JPY_423);
GBP_M5_78.minus(iterable);
} catch (CurrencyMismatchException ex) {
assertEquals(GBP, ex.getFirstCurrency());
assertEquals(JPY, ex.getSecondCurrency());
throw ex;
}
}
@Test(expected = NullPointerException.class)
public void test_minus_Iterable_nullEntry() {
// A null element inside the iterable must be rejected, not skipped.
Iterable<Money> iterable = Arrays.asList(GBP_2_33, null);
GBP_M5_78.minus(iterable);
}
@Test(expected = NullPointerException.class)
public void test_minus_Iterable_nullIterable() {
GBP_M5_78.minus((Iterable<Money>) null);
}
//-----------------------------------------------------------------------
// minus(Money)
//-----------------------------------------------------------------------
@Test
public void test_minus_Money_zero() {
Money test = GBP_2_34.minus(GBP_0_00);
assertSame(GBP_2_34, test);
}
@Test
public void test_minus_Money_positive() {
Money test = GBP_2_34.minus(GBP_1_23);
assertEquals("GBP 1.11", test.toString());
}
@Test
public void test_minus_Money_negative() {
// Subtracting a negative adds: 2.34 - (-1.23) = 3.57.
Money test = GBP_2_34.minus(GBP_M1_23);
assertEquals("GBP 3.57", test.toString());
}
@Test(expected = CurrencyMismatchException.class)
public void test_minus_Money_currencyMismatch() {
try {
GBP_M5_78.minus(USD_1_23);
} catch (CurrencyMismatchException ex) {
assertEquals(GBP, ex.getFirstCurrency());
assertEquals(USD, ex.getSecondCurrency());
throw ex;
}
}
@Test(expected = NullPointerException.class)
public void test_minus_Money_nullMoney() {
GBP_M5_78.minus((Money) null);
}
//-----------------------------------------------------------------------
// minus(BigDecimal)
//-----------------------------------------------------------------------
@Test
public void test_minus_BigDecimal_zero() {
Money test = GBP_2_34.minus(BigDecimal.ZERO);
assertSame(GBP_2_34, test);
}
@Test
public void test_minus_BigDecimal_positive() {
Money test = GBP_2_34.minus(new BigDecimal("1.23"));
assertEquals("GBP 1.11", test.toString());
}
@Test
public void test_minus_BigDecimal_negative() {
Money test = GBP_2_34.minus(new BigDecimal("-1.23"));
assertEquals("GBP 3.57", test.toString());
}
@Test(expected = ArithmeticException.class)
public void test_minus_BigDecimal_invalidScale() {
// Scale-3 amount with no RoundingMode -> ArithmeticException for a 2dp currency.
GBP_2_34.minus(new BigDecimal("1.235"));
}
@Test(expected = NullPointerException.class)
public void test_minus_BigDecimal_nullBigDecimal() {
GBP_M5_78.minus((BigDecimal) null);
}
//-----------------------------------------------------------------------
// minus(BigDecimal,RoundingMode)
//-----------------------------------------------------------------------
@Test
public void test_minus_BigDecimalRoundingMode_zero() {
Money test = GBP_2_34.minus(BigDecimal.ZERO, RoundingMode.UNNECESSARY);
assertSame(GBP_2_34, test);
}
@Test
public void test_minus_BigDecimalRoundingMode_positive() {
Money test = GBP_2_34.minus(new BigDecimal("1.23"), RoundingMode.UNNECESSARY);
assertEquals("GBP 1.11", test.toString());
}
@Test
public void test_minus_BigDecimalRoundingMode_negative() {
Money test = GBP_2_34.minus(new BigDecimal("-1.23"), RoundingMode.UNNECESSARY);
assertEquals("GBP 3.57", test.toString());
}
@Test
public void test_minus_BigDecimalRoundingMode_roundDown() {
// 2.34 - 1.235 = 1.105, truncated toward zero to 1.10 by DOWN.
Money test = GBP_2_34.minus(new BigDecimal("1.235"), RoundingMode.DOWN);
assertEquals("GBP 1.10", test.toString());
}
@Test(expected = ArithmeticException.class)
public void test_minus_BigDecimalRoundingMode_roundUnecessary() {
GBP_2_34.minus(new BigDecimal("1.235"), RoundingMode.UNNECESSARY);
}
@Test(expected = NullPointerException.class)
public void test_minus_BigDecimalRoundingMode_nullBigDecimal() {
GBP_M5_78.minus((BigDecimal) null, RoundingMode.UNNECESSARY);
}
@Test(expected = NullPointerException.class)
public void test_minus_BigDecimalRoundingMode_nullRoundingMode() {
GBP_M5_78.minus(BIGDEC_2_34, (RoundingMode) null);
}
//-----------------------------------------------------------------------
// minus(double)
//-----------------------------------------------------------------------
@Test
public void test_minus_double_zero() {
Money test = GBP_2_34.minus(0d);
assertSame(GBP_2_34, test);
}
@Test
public void test_minus_double_positive() {
Money test = GBP_2_34.minus(1.23d);
assertEquals("GBP 1.11", test.toString());
}
@Test
public void test_minus_double_negative() {
Money test = GBP_2_34.minus(-1.23d);
assertEquals("GBP 3.57", test.toString());
}
@Test(expected = ArithmeticException.class)
public void test_minus_double_invalidScale() {
GBP_2_34.minus(1.235d);
}
//-----------------------------------------------------------------------
// minus(double,RoundingMode)
//-----------------------------------------------------------------------
@Test
public void test_minus_doubleRoundingMode_zero() {
Money test = GBP_2_34.minus(0d, RoundingMode.UNNECESSARY);
assertSame(GBP_2_34, test);
}
@Test
public void test_minus_doubleRoundingMode_positive() {
Money test = GBP_2_34.minus(1.23d, RoundingMode.UNNECESSARY);
assertEquals("GBP 1.11", test.toString());
}
@Test
public void test_minus_doubleRoundingMode_negative() {
Money test = GBP_2_34.minus(-1.23d, RoundingMode.UNNECESSARY);
assertEquals("GBP 3.57", test.toString());
}
@Test
public void test_minus_doubleRoundingMode_roundDown() {
// 2.34 - 1.235 = 1.105, truncated to 1.10 by DOWN.
Money test = GBP_2_34.minus(1.235d, RoundingMode.DOWN);
assertEquals("GBP 1.10", test.toString());
}
@Test(expected = ArithmeticException.class)
public void test_minus_doubleRoundingMode_roundUnecessary() {
// UNNECESSARY must throw when the result would need rounding.
GBP_2_34.minus(1.235d, RoundingMode.UNNECESSARY);
}
@Test(expected = NullPointerException.class)
public void test_minus_doubleRoundingMode_nullRoundingMode() {
GBP_M5_78.minus(2.34d, (RoundingMode) null);
}
//-----------------------------------------------------------------------
// minusMajor(long)
//-----------------------------------------------------------------------
@Test
public void test_minusMajor_zero() {
Money test = GBP_2_34.minusMajor(0);
assertSame(GBP_2_34, test);
}
@Test
public void test_minusMajor_positive() {
// 2.34 - 123 whole units = -120.66.
Money test = GBP_2_34.minusMajor(123);
assertEquals("GBP -120.66", test.toString());
}
@Test
public void test_minusMajor_negative() {
Money test = GBP_2_34.minusMajor(-123);
assertEquals("GBP 125.34", test.toString());
}
//-----------------------------------------------------------------------
// minusMinor(long)
//-----------------------------------------------------------------------
@Test
public void test_minusMinor_zero() {
Money test = GBP_2_34.minusMinor(0);
assertSame(GBP_2_34, test);
}
@Test
public void test_minusMinor_positive() {
// 234p - 123p = 111p = 1.11.
Money test = GBP_2_34.minusMinor(123);
assertEquals("GBP 1.11", test.toString());
}
@Test
public void test_minusMinor_negative() {
Money test = GBP_2_34.minusMinor(-123);
assertEquals("GBP 3.57", test.toString());
}
//-----------------------------------------------------------------------
// multipliedBy(BigDecimal,RoundingMode)
//-----------------------------------------------------------------------
@Test
public void test_multipliedBy_BigDecimalRoundingMode_one() {
// Multiplying by one is a no-op; the same instance is returned.
Money test = GBP_2_34.multipliedBy(BigDecimal.ONE, RoundingMode.DOWN);
assertSame(GBP_2_34, test);
}
@Test
public void test_multipliedBy_BigDecimalRoundingMode_positive() {
// 2.33 * 2.5 = 5.825 -> 5.82 with DOWN (truncate toward zero).
Money test = GBP_2_33.multipliedBy(new BigDecimal("2.5"), RoundingMode.DOWN);
assertEquals("GBP 5.82", test.toString());
}
@Test
public void test_multipliedBy_BigDecimalRoundingMode_positive_halfUp() {
// 5.825 -> 5.83 with HALF_UP.
Money test = GBP_2_33.multipliedBy(new BigDecimal("2.5"), RoundingMode.HALF_UP);
assertEquals("GBP 5.83", test.toString());
}
@Test
public void test_multipliedBy_BigDecimalRoundingMode_negative() {
// -5.825 -> -5.83 with FLOOR (rounds toward negative infinity).
Money test = GBP_2_33.multipliedBy(new BigDecimal("-2.5"), RoundingMode.FLOOR);
assertEquals("GBP -5.83", test.toString());
}
@Test(expected = NullPointerException.class)
public void test_multipliedBy_BigDecimalRoundingMode_nullBigDecimal() {
GBP_5_78.multipliedBy((BigDecimal) null, RoundingMode.DOWN);
}
@Test(expected = NullPointerException.class)
public void test_multipliedBy_BigDecimalRoundingMode_nullRoundingMode() {
GBP_5_78.multipliedBy(new BigDecimal("2.5"), (RoundingMode) null);
}
//-----------------------------------------------------------------------
// multipliedBy(double,RoundingMode)
//-----------------------------------------------------------------------
@Test
public void test_multipliedBy_doubleRoundingMode_one() {
Money test = GBP_2_34.multipliedBy(1d, RoundingMode.DOWN);
assertSame(GBP_2_34, test);
}
@Test
public void test_multipliedBy_doubleRoundingMode_positive() {
Money test = GBP_2_33.multipliedBy(2.5d, RoundingMode.DOWN);
assertEquals("GBP 5.82", test.toString());
}
@Test
public void test_multipliedBy_doubleRoundingMode_positive_halfUp() {
Money test = GBP_2_33.multipliedBy(2.5d, RoundingMode.HALF_UP);
assertEquals("GBP 5.83", test.toString());
}
@Test
public void test_multipliedBy_doubleRoundingMode_negative() {
Money test = GBP_2_33.multipliedBy(-2.5d, RoundingMode.FLOOR);
assertEquals("GBP -5.83", test.toString());
}
@Test(expected = NullPointerException.class)
public void test_multipliedBy_doubleRoundingMode_nullRoundingMode() {
GBP_5_78.multipliedBy(2.5d, (RoundingMode) null);
}
//-----------------------------------------------------------------------
// multipliedBy(long)
//-----------------------------------------------------------------------
@Test
public void test_multipliedBy_long_one() {
Money test = GBP_2_34.multipliedBy(1);
assertSame(GBP_2_34, test);
}
@Test
public void test_multipliedBy_long_positive() {
// Exact long multiplication needs no rounding mode: 2.34 * 3 = 7.02.
Money test = GBP_2_34.multipliedBy(3);
assertEquals("GBP 7.02", test.toString());
}
@Test
public void test_multipliedBy_long_negative() {
Money test = GBP_2_34.multipliedBy(-3);
assertEquals("GBP -7.02", test.toString());
}
//-----------------------------------------------------------------------
// dividedBy(BigDecimal,RoundingMode)
//-----------------------------------------------------------------------
@Test
public void test_dividedBy_BigDecimalRoundingMode_one() {
// Dividing by one is a no-op; the same instance is returned.
Money test = GBP_2_34.dividedBy(BigDecimal.ONE, RoundingMode.DOWN);
assertSame(GBP_2_34, test);
}
@Test
public void test_dividedBy_BigDecimalRoundingMode_positive() {
// 2.34 / 2.5 = 0.936 -> 0.93 with DOWN.
Money test = GBP_2_34.dividedBy(new BigDecimal("2.5"), RoundingMode.DOWN);
assertEquals("GBP 0.93", test.toString());
}
@Test
public void test_dividedBy_BigDecimalRoundingMode_positive_halfUp() {
// 0.936 -> 0.94 with HALF_UP.
Money test = GBP_2_34.dividedBy(new BigDecimal("2.5"), RoundingMode.HALF_UP);
assertEquals("GBP 0.94", test.toString());
}
@Test
public void test_dividedBy_BigDecimalRoundingMode_negative() {
// -0.936 -> -0.94 with FLOOR (toward negative infinity).
Money test = GBP_2_34.dividedBy(new BigDecimal("-2.5"), RoundingMode.FLOOR);
assertEquals("GBP -0.94", test.toString());
}
@Test(expected = NullPointerException.class)
public void test_dividedBy_BigDecimalRoundingMode_nullBigDecimal() {
GBP_5_78.dividedBy((BigDecimal) null, RoundingMode.DOWN);
}
@Test(expected = NullPointerException.class)
public void test_dividedBy_BigDecimalRoundingMode_nullRoundingMode() {
GBP_5_78.dividedBy(new BigDecimal("2.5"), (RoundingMode) null);
}
//-----------------------------------------------------------------------
// dividedBy(double,RoundingMode)
//-----------------------------------------------------------------------
@Test
public void test_dividedBy_doubleRoundingMode_one() {
Money test = GBP_2_34.dividedBy(1d, RoundingMode.DOWN);
assertSame(GBP_2_34, test);
}
@Test
public void test_dividedBy_doubleRoundingMode_positive() {
Money test = GBP_2_34.dividedBy(2.5d, RoundingMode.DOWN);
assertEquals("GBP 0.93", test.toString());
}
@Test
public void test_dividedBy_doubleRoundingMode_positive_halfUp() {
Money test = GBP_2_34.dividedBy(2.5d, RoundingMode.HALF_UP);
assertEquals("GBP 0.94", test.toString());
}
@Test
public void test_dividedBy_doubleRoundingMode_negative() {
Money test = GBP_2_34.dividedBy(-2.5d, RoundingMode.FLOOR);
assertEquals("GBP -0.94", test.toString());
}
@Test(expected = NullPointerException.class)
public void test_dividedBy_doubleRoundingMode_nullRoundingMode() {
GBP_5_78.dividedBy(2.5d, (RoundingMode) null);
}
//-----------------------------------------------------------------------
// dividedBy(long,RoundingMode)
//-----------------------------------------------------------------------
@Test
public void test_dividedBy_long_one() {
Money test = GBP_2_34.dividedBy(1, RoundingMode.DOWN);
assertSame(GBP_2_34, test);
}
@Test
public void test_dividedBy_long_positive() {
// 2.34 / 3 = 0.78 exactly.
Money test = GBP_2_34.dividedBy(3, RoundingMode.DOWN);
assertEquals("GBP 0.78", test.toString());
}
@Test
public void test_dividedBy_long_positive_roundDown() {
// 2.35 / 3 = 0.78333... -> 0.78 with DOWN.
Money test = GBP_2_35.dividedBy(3, RoundingMode.DOWN);
assertEquals("GBP 0.78", test.toString());
}
@Test
public void test_dividedBy_long_positive_roundUp() {
// 0.78333... -> 0.79 with UP (away from zero).
Money test = GBP_2_35.dividedBy(3, RoundingMode.UP);
assertEquals("GBP 0.79", test.toString());
}
@Test
public void test_dividedBy_long_negative() {
Money test = GBP_2_34.dividedBy(-3, RoundingMode.DOWN);
assertEquals("GBP -0.78", test.toString());
}
//-----------------------------------------------------------------------
// negated()
//-----------------------------------------------------------------------
@Test
public void test_negated_positive() {
Money test = GBP_2_34.negated();
assertEquals("GBP -2.34", test.toString());
}
@Test
public void test_negated_negative() {
Money test = Money.parse("GBP -2.34").negated();
assertEquals("GBP 2.34", test.toString());
}
//-----------------------------------------------------------------------
// abs()
//-----------------------------------------------------------------------
@Test
public void test_abs_positive() {
// abs() of an already-positive amount returns the same instance.
Money test = GBP_2_34.abs();
assertSame(GBP_2_34, test);
}
@Test
public void test_abs_negative() {
Money test = Money.parse("GBP -2.34").abs();
assertEquals("GBP 2.34", test.toString());
}
//-----------------------------------------------------------------------
// rounded()
//-----------------------------------------------------------------------
@Test
public void test_round_2down() {
// GBP amounts already have scale 2, so rounding to 2 decimal places
// is a no-op and the same instance is returned.
Money test = GBP_2_34.rounded(2, RoundingMode.DOWN);
assertSame(GBP_2_34, test);
}
@Test
public void test_round_2up() {
// Rounding GBP 2.34 to 2 decimal places is a no-op (GBP already has
// scale 2), so the same instance is returned regardless of mode.
// BUG FIX: previously used RoundingMode.DOWN (copy-paste from
// test_round_2down), so the UP variant of this no-op was never exercised.
Money test = GBP_2_34.rounded(2, RoundingMode.UP);
assertSame(GBP_2_34, test);
}
@Test
public void test_round_1down() {
// Scale 1 with DOWN truncates the hundredths digit: 2.34 -> 2.3,
// then the result is re-expressed at the currency's scale as 2.30.
Money test = GBP_2_34.rounded(1, RoundingMode.DOWN);
assertEquals("GBP 2.30", test.toString());
}
@Test
public void test_round_1up() {
Money test = GBP_2_34.rounded(1, RoundingMode.UP);
assertEquals("GBP 2.40", test.toString());
}
@Test
public void test_round_0down() {
// Scale 0 rounds to whole units.
Money test = GBP_2_34.rounded(0, RoundingMode.DOWN);
assertEquals("GBP 2.00", test.toString());
}
@Test
public void test_round_0up() {
Money test = GBP_2_34.rounded(0, RoundingMode.UP);
assertEquals("GBP 3.00", test.toString());
}
@Test
public void test_round_M1down() {
// Negative scale rounds to tens of units: 432.34 -> 430.
Money test = Money.parse("GBP 432.34").rounded(-1, RoundingMode.DOWN);
assertEquals("GBP 430.00", test.toString());
}
@Test
public void test_round_M1up() {
Money test = Money.parse("GBP 432.34").rounded(-1, RoundingMode.UP);
assertEquals("GBP 440.00", test.toString());
}
@Test
public void test_round_3() {
// A scale finer than the currency's (3 > 2) is a no-op; same instance returned.
Money test = GBP_2_34.rounded(3, RoundingMode.DOWN);
assertSame(GBP_2_34, test);
}
//-----------------------------------------------------------------------
// convertedTo(CurrencyUnit,BigDecimal,RoundingMode)
//-----------------------------------------------------------------------
@Test
public void test_convertedTo_BigDecimalRoundingMode_positive() {
// 2.33 GBP at rate 2.5 = 5.825 EUR -> 5.82 with DOWN.
Money test = GBP_2_33.convertedTo(EUR, new BigDecimal("2.5"), RoundingMode.DOWN);
assertEquals("EUR 5.82", test.toString());
}
@Test
public void test_convertedTo_BigDecimalRoundingMode_positive_halfUp() {
Money test = GBP_2_33.convertedTo(EUR, new BigDecimal("2.5"), RoundingMode.HALF_UP);
assertEquals("EUR 5.83", test.toString());
}
@Test(expected = IllegalArgumentException.class)
public void test_convertedTo_BigDecimalRoundingMode_negative() {
// A negative conversion rate is rejected.
GBP_2_33.convertedTo(EUR, new BigDecimal("-2.5"), RoundingMode.FLOOR);
}
@Test(expected = IllegalArgumentException.class)
public void test_convertedTo_BigDecimalRoundingMode_sameCurrency() {
// Converting to the same currency is rejected.
GBP_2_33.convertedTo(GBP, new BigDecimal("2.5"), RoundingMode.DOWN);
}
@Test(expected = NullPointerException.class)
public void test_convertedTo_BigDecimalRoundingMode_nullCurrency() {
GBP_5_78.convertedTo((CurrencyUnit) null, new BigDecimal("2"), RoundingMode.DOWN);
}
@Test(expected = NullPointerException.class)
public void test_convertedTo_BigDecimalRoundingMode_nullBigDecimal() {
GBP_5_78.convertedTo(EUR, (BigDecimal) null, RoundingMode.DOWN);
}
@Test(expected = NullPointerException.class)
public void test_convertedTo_BigDecimalRoundingMode_nullRoundingMode() {
GBP_5_78.convertedTo(EUR, new BigDecimal("2.5"), (RoundingMode) null);
}
//-----------------------------------------------------------------------
// toBigMoney()
//-----------------------------------------------------------------------
@Test
public void test_toBigMoney() {
// GBP 2.34 converts to an equal BigMoney built from 234 minor units.
assertEquals(BigMoney.ofMinor(GBP, 234), GBP_2_34.toBigMoney());
}
//-----------------------------------------------------------------------
// isSameCurrency(Money)
//-----------------------------------------------------------------------
@Test
public void test_isSameCurrency_Money_same() {
// Only the currency matters, not the amount.
assertEquals(true, GBP_2_34.isSameCurrency(GBP_2_35));
}
@Test
public void test_isSameCurrency_Money_different() {
assertEquals(false, GBP_2_34.isSameCurrency(USD_2_34));
}
@Test
public void test_isSameCurrency_BigMoney_same() {
// Also accepts BigMoney arguments.
assertEquals(true, GBP_2_34.isSameCurrency(BigMoney.parse("GBP 2")));
}
@Test
public void test_isSameCurrency_BigMoney_different() {
assertEquals(false, GBP_2_34.isSameCurrency(BigMoney.parse("USD 2")));
}
@Test(expected = NullPointerException.class)
public void test_isSameCurrency_Money_nullMoney() {
GBP_2_34.isSameCurrency((Money) null);
}
//-----------------------------------------------------------------------
// compareTo()
//-----------------------------------------------------------------------
@Test
public void test_compareTo_Money() {
// Exhaustive pairwise comparison of three ordered values; asserts the
// exact -1/0/1 results (not just their signs).
Money a = GBP_2_34;
Money b = GBP_2_35;
Money c = GBP_2_36;
assertEquals(0, a.compareTo(a));
assertEquals(0, b.compareTo(b));
assertEquals(0, c.compareTo(c));
assertEquals(-1, a.compareTo(b));
assertEquals(1, b.compareTo(a));
assertEquals(-1, a.compareTo(c));
assertEquals(1, c.compareTo(a));
assertEquals(-1, b.compareTo(c));
assertEquals(1, c.compareTo(b));
}
@Test
public void test_compareTo_BigMoney() {
// Money can also be compared against BigMoney of the same currency.
Money t = GBP_2_35;
BigMoney a = BigMoney.ofMinor(GBP, 234);
BigMoney b = BigMoney.ofMinor(GBP, 235);
BigMoney c = BigMoney.ofMinor(GBP, 236);
assertEquals(1, t.compareTo(a));
assertEquals(0, t.compareTo(b));
assertEquals(-1, t.compareTo(c));
}
@Test(expected = CurrencyMismatchException.class)
public void test_compareTo_currenciesDiffer() {
Money a = GBP_2_34;
Money b = USD_2_35;
a.compareTo(b);
}
@Test(expected = ClassCastException.class)
@SuppressWarnings({ "unchecked", "rawtypes" })
public void test_compareTo_wrongType() {
// Deliberate raw-type usage to force a runtime ClassCastException.
Comparable a = GBP_2_34;
a.compareTo("NotRightType");
}
//-----------------------------------------------------------------------
// isEqual()
//-----------------------------------------------------------------------
@Test
public void test_isEqual() {
Money a = GBP_2_34;
Money b = GBP_2_35;
Money c = GBP_2_36;
assertEquals(true, a.isEqual(a));
assertEquals(true, b.isEqual(b));
assertEquals(true, c.isEqual(c));
assertEquals(false, a.isEqual(b));
assertEquals(false, b.isEqual(a));
assertEquals(false, a.isEqual(c));
assertEquals(false, c.isEqual(a));
assertEquals(false, b.isEqual(c));
assertEquals(false, c.isEqual(b));
}
@Test
public void test_isEqual_Money() {
// isEqual compares monetary value across Money and BigMoney.
Money a = GBP_2_34;
BigMoney b = BigMoney.ofMinor(GBP, 234);
assertEquals(true, a.isEqual(b));
}
@Test(expected = CurrencyMismatchException.class)
public void test_isEqual_currenciesDiffer() {
// Unlike equals(), isEqual throws rather than returning false on a
// currency mismatch.
Money a = GBP_2_34;
Money b = USD_2_35;
a.isEqual(b);
}
//-----------------------------------------------------------------------
// isGreaterThan()
//-----------------------------------------------------------------------
@Test
public void test_isGreaterThan() {
// Strict comparison: equal values are not "greater".
Money a = GBP_2_34;
Money b = GBP_2_35;
Money c = GBP_2_36;
assertEquals(false, a.isGreaterThan(a));
assertEquals(false, b.isGreaterThan(b));
assertEquals(false, c.isGreaterThan(c));
assertEquals(false, a.isGreaterThan(b));
assertEquals(true, b.isGreaterThan(a));
assertEquals(false, a.isGreaterThan(c));
assertEquals(true, c.isGreaterThan(a));
assertEquals(false, b.isGreaterThan(c));
assertEquals(true, c.isGreaterThan(b));
}
@Test(expected = CurrencyMismatchException.class)
public void test_isGreaterThan_currenciesDiffer() {
Money a = GBP_2_34;
Money b = USD_2_35;
a.isGreaterThan(b);
}
//-----------------------------------------------------------------------
// isLessThan()
//-----------------------------------------------------------------------
@Test
public void test_isLessThan() {
Money a = GBP_2_34;
Money b = GBP_2_35;
Money c = GBP_2_36;
assertEquals(false, a.isLessThan(a));
assertEquals(false, b.isLessThan(b));
assertEquals(false, c.isLessThan(c));
assertEquals(true, a.isLessThan(b));
assertEquals(false, b.isLessThan(a));
assertEquals(true, a.isLessThan(c));
assertEquals(false, c.isLessThan(a));
assertEquals(true, b.isLessThan(c));
assertEquals(false, c.isLessThan(b));
}
@Test(expected = CurrencyMismatchException.class)
public void test_isLessThan_currenciesDiffer() {
Money a = GBP_2_34;
Money b = USD_2_35;
a.isLessThan(b);
}
//-----------------------------------------------------------------------
// equals() hashCode()
//-----------------------------------------------------------------------
@Test
public void test_equals_hashCode_positive() {
// Checks the equals/hashCode contract: reflexive, symmetric, and
// equal objects produce equal hash codes.
Money a = GBP_2_34;
Money b = GBP_2_34;
Money c = GBP_2_35;
assertEquals(true, a.equals(a));
assertEquals(true, b.equals(b));
assertEquals(true, c.equals(c));
assertEquals(true, a.equals(b));
assertEquals(true, b.equals(a));
assertEquals(true, a.hashCode() == b.hashCode());
assertEquals(false, a.equals(c));
assertEquals(false, b.equals(c));
}
@Test
public void test_equals_false() {
// equals returns false (never throws) for null and foreign types.
Money a = GBP_2_34;
assertEquals(false, a.equals(null));
assertEquals(false, a.equals(new Object()));
}
//-----------------------------------------------------------------------
// toString()
//-----------------------------------------------------------------------
@Test
public void test_toString_positive() {
Money test = Money.of(GBP, BIGDEC_2_34);
assertEquals("GBP 2.34", test.toString());
}
@Test
public void test_toString_negative() {
Money test = Money.of(EUR, BIGDEC_M5_78);
assertEquals("EUR -5.78", test.toString());
}
}
| |
/*
* Copyright 2013-2014 MarkLogic Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.marklogic.client.document;
import javax.xml.namespace.QName;
import com.marklogic.client.MarkLogicIOException;
import com.marklogic.client.io.DocumentMetadataHandle;
import com.marklogic.client.io.marker.DocumentPatchHandle;
import com.marklogic.client.util.IterableNamespaceContext;
/**
* A DocumentMetadataPatchBuilder specifies changes to the metadata
* of a database document. When using the newPatchBuilder() factory
* method of the DocumentManager to create the builder, you identify
* whether paths are specified with JSONPath or XPath.
*/
public interface DocumentMetadataPatchBuilder {
/**
 * The Cardinality enumeration indicates the number of times
 * that a path can match without error (which defaults to
 * ZERO_OR_MORE, meaning any number of matches).
 */
public enum Cardinality {
    /**
     * Allows at most one match.
     */
    ZERO_OR_ONE,
    /**
     * Requires exactly one match.
     */
    ONE,
    /**
     * Allows any number of matches.
     */
    ZERO_OR_MORE,
    /**
     * Requires at least one match.
     */
    ONE_OR_MORE;

    /**
     * Returns the standard abbreviation for the cardinality value.
     * @return the abbreviation
     */
    public String abbreviate() {
        // Guard-clause form of the original switch; one symbol per constant.
        if (this == ZERO_OR_ONE) {
            return "?";
        }
        if (this == ONE) {
            return ".";
        }
        if (this == ZERO_OR_MORE) {
            return "*";
        }
        if (this == ONE_OR_MORE) {
            return "+";
        }
        // Unreachable unless a new constant is added without updating this method.
        throw new InternalError("Unknown Cardinality: "+toString());
    }
}
/**
 * A Call specifies how to apply a built-in or library function
 * when replacing an existing fragment. You must construct a call
 * using the CallBuilder.
 * <p>
 * This is an opaque marker interface: the call specification is
 * carried internally by the implementation and exposes no methods.
 */
public interface Call {
}
/**
 * A CallBuilder constructs a Call to a built-in or library function
 * to produce the replacement for an existing fragment. You must
 * construct the CallBuilder using the factory method of the
 * DocumentPatchBuilder.
 * <p>
 * In every call, the existing fragment is an implicit operand;
 * the arguments supplied here provide the other operand(s).
 */
public interface CallBuilder {
/**
 * Calls the built-in method to add to an existing value.
 * @param number the added number
 * @return the specification of the add call
 */
public Call add(Number number);
/**
 * Calls the built-in method to subtract from an existing value.
 * @param number the subtracted number
 * @return the specification of the subtract call
 */
public Call subtract(Number number);
/**
 * Calls the built-in method to multiply an existing value.
 * @param number the multiplier
 * @return the specification of the multiply call
 */
public Call multiply(Number number);
/**
 * Calls the built-in method to divide an existing value
 * by the supplied number.
 * @param number the divisor
 * @return the specification of the divide call
 */
public Call divideBy(Number number);
/**
 * Calls the built-in method to append an existing string
 * to the supplied string (the supplied prefix comes first).
 * @param prefix the string that appears first
 * @return the specification of the concatenation call
 */
public Call concatenateAfter(String prefix);
/**
 * Calls the built-in method to concatenate an existing string
 * between the supplied strings.
 * @param prefix the string that appears first
 * @param suffix the string that appears last
 * @return the specification of the concatenation call
 */
public Call concatenateBetween(String prefix, String suffix);
/**
 * Calls the built-in method to concatenate an existing string
 * before the supplied string (the supplied suffix comes last).
 * @param suffix the string that appears last
 * @return the specification of the concatenation call
 */
public Call concatenateBefore(String suffix);
/**
 * Calls the built-in method to reduce an existing string
 * to a trailing substring.
 * @param prefix the initial part of the string
 * @return the specification of the substring call
 */
public Call substringAfter(String prefix);
/**
 * Calls the built-in method to reduce an existing string
 * to a leading substring.
 * @param suffix the final part of the string
 * @return the specification of the substring call
 */
public Call substringBefore(String suffix);
/**
 * Calls the built-in method to modify an existing string
 * with a regular expression.
 * @param pattern the matching regular expression
 * @param replacement the replacement for the match
 * @return the specification of the regex call
 */
public Call replaceRegex(String pattern, String replacement);
/**
 * Calls the built-in method to modify an existing string
 * with a regular expression.
 * @param pattern the matching regular expression
 * @param replacement the replacement for the match
 * @param flags the regex flags
 * @return the specification of the regex call
 */
public Call replaceRegex(
String pattern, String replacement, String flags
);
/**
 * Calls a function with no arguments other than the existing
 * fragment. The function must be provided by the library specified
 * using the DocumentPatchBuilder.
 * @param function the name of the function
 * @return the specification of the function call
 */
public Call applyLibrary(String function);
/**
 * Calls a function with the existing fragment and one or more
 * values. The function must be provided by the library specified
 * using the DocumentPatchBuilder.
 * @param function the name of the function
 * @param args the literal values
 * @return the specification of the function call
 */
public Call applyLibraryValues(String function, Object... args);
/**
 * Calls a function with the existing fragment and one or more
 * specified fragments. The function must be provided by the
 * library specified using the DocumentPatchBuilder.
 * @param function the name of the function
 * @param args the fragments
 * @return the specification of the function call
 */
public Call applyLibraryFragments(String function, Object... args);
}
/**
 * A PatchHandle produced by the builder can produce a string
 * representation of the patch for saving, logging, or other uses.
 */
public interface PatchHandle extends DocumentPatchHandle {
/**
 * Returns a JSON or XML representation of the patch as a string.
 * The format matches the path language chosen when the builder
 * was created.
 * @return the patch
 */
public String toString();
}
/**
 * Returns the namespaces available for the paths
 * of a patch against XML documents.
 * @return the declared namespaces
 */
public IterableNamespaceContext getNamespaces();
/**
 * Declares the namespaces available for the paths
 * of a patch against XML documents. You can use the
 * {@link com.marklogic.client.util.EditableNamespaceContext}
 * class to edit a set of namespaces. Note that the
 * following prefixes are predefined for internal use:
 * rapi, prop, xsi, and xs.
 * @param namespaces the declared namespaces
 */
public void setNamespaces(IterableNamespaceContext namespaces);
/**
 * Specifies an XQuery library installed on the server
 * that supplies functions for modifying existing fragments.
 * @param ns the XQuery library namespace
 * @param at the XQuery library path on the server
 * @return the patch builder (for convenient chaining)
 */
public DocumentMetadataPatchBuilder library(String ns, String at);
/**
 * Adds the specified collections.
 * @param collections the collection identifiers
 * @return the patch builder (for convenient chaining)
 */
public DocumentMetadataPatchBuilder addCollection(String... collections);
/**
 * Deletes the specified collections.
 * @param collections the collection identifiers
 * @return the patch builder (for convenient chaining)
 */
public DocumentMetadataPatchBuilder deleteCollection(String... collections);
/**
 * Replaces the specified collection.
 * @param oldCollection the identifier for the existing collection
 * @param newCollection the identifier for the new collection
 * @return the patch builder (for convenient chaining)
 */
public DocumentMetadataPatchBuilder replaceCollection(String oldCollection, String newCollection);
/**
 * Adds a role with the specified capabilities.
 * @param role the name of the role
 * @param capabilities the set of capabilities
 * @return the patch builder (for convenient chaining)
 */
public DocumentMetadataPatchBuilder addPermission(
String role, DocumentMetadataHandle.Capability... capabilities
);
/**
 * Deletes the specified roles.
 * @param roles the names of the roles
 * @return the patch builder (for convenient chaining)
 */
public DocumentMetadataPatchBuilder deletePermission(String... roles);
/**
 * Replaces the existing capabilities of a role.
 * @param role the name of the role
 * @param newCapabilities the replacing set of capabilities
 * @return the patch builder (for convenient chaining)
 */
public DocumentMetadataPatchBuilder replacePermission(
String role, DocumentMetadataHandle.Capability... newCapabilities
);
/**
 * Replaces an existing role with a new role.
 * @param oldRole the name of the replaced role
 * @param newRole the name of the replacing role
 * @param newCapabilities the capabilities of the replacing role
 * @return the patch builder (for convenient chaining)
 */
public DocumentMetadataPatchBuilder replacePermission(
String oldRole, String newRole, DocumentMetadataHandle.Capability... newCapabilities
);
/**
 * Adds a new metadata property with a simple name.
 * @param name the name of the new property
 * @param value the value of the new property
 * @return the patch builder (for convenient chaining)
 */
public DocumentMetadataPatchBuilder addPropertyValue(String name, Object value);
/**
 * Adds a new metadata property with a namespaced name.
 * @param name the namespaced name of the new property
 * @param value the value of the new property
 * @return the patch builder (for convenient chaining)
 */
public DocumentMetadataPatchBuilder addPropertyValue(QName name, Object value);
/**
 * Deletes the specified metadata properties with simple names.
 * @param names the property names
 * @return the patch builder (for convenient chaining)
 */
public DocumentMetadataPatchBuilder deleteProperty(String... names);
/**
 * Deletes the specified metadata properties with namespaced names.
 * @param names the namespaced property names
 * @return the patch builder (for convenient chaining)
 */
public DocumentMetadataPatchBuilder deleteProperty(QName... names);
/**
 * Replaces the existing value of a metadata property having a simple name.
 * @param name the name of the existing property
 * @param newValue the new value of the property
 * @return the patch builder (for convenient chaining)
 */
public DocumentMetadataPatchBuilder replacePropertyValue(String name, Object newValue);
/**
* Replaces the existing value of a metadata property having a namespaced name.
* @param name the namespaced name of the existing property
* @param newValue the new value of the property
* @return the patch builder (for convenient chaining)
*/
public DocumentMetadataPatchBuilder replacePropertyValue(QName name, Object newValue);
/**
* Replaces an existing metadata property with a new property having a simple name.
* @param oldName the name of the existing property
* @param newName the name of the replacing property
* @param newValue the value of the property
* @return the patch builder (for convenient chaining)
*/
public DocumentMetadataPatchBuilder replacePropertyValue(
String oldName, String newName, Object newValue
);
/**
* Replaces an existing metadata property with a new property having a namespaced name.
* @param oldName the namespaced name of the existing property
* @param newName the namespaced name of the replacing property
* @param newValue the value of the property
* @return the patch builder (for convenient chaining)
*/
public DocumentMetadataPatchBuilder replacePropertyValue(
QName oldName, QName newName, Object newValue
);
/**
* A factory method for building calls to modify an existing node
* by applying built-in functions or functions from a library.
* @return the builder for function calls
*/
public CallBuilder call();
/**
* Specifies a replacement operation by applying a function
* to a metadata property having a simple name. You must use
* CallBuilder to build a specification of the call.
* @param name the name of the existing property
* @param call the specification of the function call
* @return the patch builder (for convenient chaining)
*/
public DocumentMetadataPatchBuilder replacePropertyApply(
String name, Call call
);
/**
* Specifies a replacement operation by applying a function
* to a metadata property having a namespaced name. You must
* use CallBuilder to build a specification of the call.
* @param name the name of the existing property
* @param call the specification of the function call
* @return the patch builder (for convenient chaining)
*/
public DocumentMetadataPatchBuilder replacePropertyApply(
QName name, Call call
);
/**
* Sets the search quality of the document.
* @param quality the new value for search quality
* @return the patch builder (for convenient chaining)
*/
public DocumentMetadataPatchBuilder setQuality(int quality);
/**
* Builds the patch that modifies the metadata or content of the
* database document and provides a handle for sending the patch
* to the server using the patch() method of the DocumentManager.
* Once the patch is built, specifying additional operation with
* the patch builder do not alter the patch built previously.
* @return the handle on the built patch
*/
public PatchHandle build() throws MarkLogicIOException;
}
| |
/*
* Copyright 2000-2016 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.server;
import static java.nio.charset.StandardCharsets.UTF_8;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.logging.Logger;
import javax.servlet.http.HttpServletResponse;
import org.jsoup.nodes.DataNode;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.DocumentType;
import org.jsoup.nodes.Element;
import org.jsoup.nodes.Node;
import org.jsoup.parser.Tag;
import com.vaadin.annotations.Viewport;
import com.vaadin.annotations.ViewportGeneratorClass;
import com.vaadin.server.DependencyFilter.FilterContext;
import com.vaadin.server.communication.AtmospherePushConnection;
import com.vaadin.shared.ApplicationConstants;
import com.vaadin.shared.VaadinUriResolver;
import com.vaadin.shared.Version;
import com.vaadin.shared.communication.PushMode;
import com.vaadin.ui.Dependency;
import com.vaadin.ui.Dependency.Type;
import com.vaadin.ui.UI;
import com.vaadin.util.ReflectTools;
import elemental.json.Json;
import elemental.json.JsonException;
import elemental.json.JsonObject;
import elemental.json.impl.JsonUtil;
/**
* Handles the initial request to start the application.
*
* @author Vaadin Ltd
* @since 7.0.0
*
* @deprecated As of 7.0. Will likely change or be removed in a future version
*/
@Deprecated
public abstract class BootstrapHandler extends SynchronizedRequestHandler {
/**
* Parameter that is added to the UI init request if the session has already
* been restarted when generating the bootstrap HTML and ?restartApplication
* should thus be ignored when handling the UI init request.
*/
public static final String IGNORE_RESTART_PARAM = "ignoreRestart";
/**
* Provides context information for the bootstrap process.
*/
protected class BootstrapContext implements Serializable {
private final VaadinResponse response;
private final BootstrapFragmentResponse bootstrapResponse;
// The fields below are lazily computed and cached by their getters;
// they are null until first requested.
private String themeName;
private String appId;
private PushMode pushMode;
private JsonObject applicationParameters;
private BootstrapUriResolver uriResolver;
private WidgetsetInfo widgetsetInfo;
/**
* Creates a new context instance using the given Vaadin/HTTP response
* and bootstrap response.
*
* @param response
* the response object
* @param bootstrapResponse
* the bootstrap response object
*/
public BootstrapContext(VaadinResponse response,
BootstrapFragmentResponse bootstrapResponse) {
this.response = response;
this.bootstrapResponse = bootstrapResponse;
}
/**
* Gets the Vaadin/HTTP response.
*
* @return the Vaadin/HTTP response
*/
public VaadinResponse getResponse() {
return response;
}
/**
* Gets the Vaadin/HTTP request.
*
* @return the Vaadin/HTTP request
*/
public VaadinRequest getRequest() {
return bootstrapResponse.getRequest();
}
/**
* Gets the Vaadin session.
*
* @return the Vaadin session
*/
public VaadinSession getSession() {
return bootstrapResponse.getSession();
}
/**
* Gets the UI class which will be used.
*
* @return the UI class
*/
public Class<? extends UI> getUIClass() {
return bootstrapResponse.getUiClass();
}
/**
* Gets information about the widgetset to use.
*
* @return the widgetset which will be loaded
*/
public WidgetsetInfo getWidgetsetInfo() {
// Lazily resolved on first access, then cached for this context.
if (widgetsetInfo == null) {
widgetsetInfo = getWidgetsetForUI(this);
}
return widgetsetInfo;
}
/**
* @return returns the name of the widgetset to use
* @deprecated use {@link #getWidgetsetInfo()} instead
*/
@Deprecated
public String getWidgetsetName() {
return getWidgetsetInfo().getWidgetsetName();
}
/**
* Gets the name of the theme to use.
*
* @return the name of the theme, with special characters escaped or
* removed
*/
public String getThemeName() {
// Lazily resolved on first access, then cached for this context.
if (themeName == null) {
themeName = findAndEscapeThemeName(this);
}
return themeName;
}
/**
* Gets the push mode to use.
*
* @return the desired push mode
*/
public PushMode getPushMode() {
if (pushMode == null) {
// Ask the UI provider first, then fall back to the deployment
// configuration when the provider has no opinion.
UICreateEvent event = new UICreateEvent(getRequest(),
getUIClass());
pushMode = getBootstrapResponse().getUIProvider()
.getPushMode(event);
if (pushMode == null) {
pushMode = getRequest().getService()
.getDeploymentConfiguration().getPushMode();
}
if (pushMode.isEnabled()
&& !getRequest().getService().ensurePushAvailable()) {
/*
* Fall back if not supported (ensurePushAvailable will log
* information to the developer the first time this happens)
*/
pushMode = PushMode.DISABLED;
}
}
return pushMode;
}
/**
* Gets the application id.
*
* The application id is defined by
* {@link VaadinService#getMainDivId(VaadinSession, VaadinRequest, Class)}
*
* @return the application id
*/
public String getAppId() {
if (appId == null) {
appId = getRequest().getService().getMainDivId(getSession(),
getRequest(), getUIClass());
}
return appId;
}
/**
* Gets the bootstrap response object.
*
* @return the bootstrap response object
*/
public BootstrapFragmentResponse getBootstrapResponse() {
return bootstrapResponse;
}
/**
* Gets the application parameters specified by the BootstrapHandler.
*
* @return the application parameters which will be written on the page
*/
public JsonObject getApplicationParameters() {
if (applicationParameters == null) {
applicationParameters = BootstrapHandler.this
.getApplicationParameters(this);
}
return applicationParameters;
}
/**
* Gets the URI resolver to use for bootstrap resources.
*
* @return the URI resolver
* @since 8.1
*/
public BootstrapUriResolver getUriResolver() {
if (uriResolver == null) {
uriResolver = new BootstrapUriResolver(this);
}
return uriResolver;
}
}
/**
* The URI resolver used in the bootstrap process.
*
* @since 8.1
*/
protected static class BootstrapUriResolver extends VaadinUriResolver {
private final BootstrapContext context;
// Lazily resolved frontend:// base URL, cached after first access.
private String frontendUrl;
/**
* Creates a new bootstrap resolver based on the given bootstrap
* context.
*
* @param bootstrapContext
* the bootstrap context
*/
public BootstrapUriResolver(BootstrapContext bootstrapContext) {
context = bootstrapContext;
}
@Override
protected String getVaadinDirUrl() {
// Read from the application parameters written for this bootstrap.
return context.getApplicationParameters()
.getString(ApplicationConstants.VAADIN_DIR_URL);
}
@Override
protected String getThemeUri() {
return getVaadinDirUrl() + "themes/" + context.getThemeName();
}
@Override
protected String getServiceUrlParameterName() {
return getConfigOrNull(
ApplicationConstants.SERVICE_URL_PARAMETER_NAME);
}
@Override
protected String getServiceUrl() {
// Defaults to the relative "./" when no explicit service URL is
// configured; otherwise ensures a trailing slash.
String serviceUrl = getConfigOrNull(
ApplicationConstants.SERVICE_URL);
if (serviceUrl == null) {
return "./";
} else if (!serviceUrl.endsWith("/")) {
serviceUrl += "/";
}
return serviceUrl;
}
// Returns the named application parameter, or null when it is absent.
private String getConfigOrNull(String name) {
JsonObject parameters = context.getApplicationParameters();
if (parameters.hasKey(name)) {
return parameters.getString(name);
} else {
return null;
}
}
@Override
protected String encodeQueryStringParameterValue(String queryString) {
String encodedString = null;
try {
encodedString = URLEncoder.encode(queryString,
UTF_8.name());
} catch (UnsupportedEncodingException e) {
// should never happen: UTF-8 is a required standard charset
throw new RuntimeException("Could not find UTF-8", e);
}
return encodedString;
}
@Override
protected String getContextRootUrl() {
String root = context.getApplicationParameters()
.getString(ApplicationConstants.CONTEXT_ROOT_URL);
// getApplicationParameters() documents vaadinDir/context root as
// slash-terminated; see the assertion below.
assert root.endsWith("/");
return root;
}
@Override
protected String getFrontendUrl() {
if (frontendUrl == null) {
frontendUrl = resolveFrontendUrl(context.getSession());
}
return frontendUrl;
}
}
/**
 * Resolves the URL to use for the {@literal frontend://} protocol.
 * <p>
 * Chooses between the ES6 and ES5 bundle locations based on whether the
 * session's browser supports ES6, reading the value from the application
 * or system properties with a built-in default. The returned URL always
 * ends with a slash.
 *
 * @param session
 *            the session of the user to resolve the protocol for
 * @return the URL that frontend:// resolves to, possibly using another
 *         internal protocol
 * @since 8.1
 */
public static String resolveFrontendUrl(VaadinSession session) {
    boolean es6 = session.getBrowser().isEs6Supported();
    String property = es6 ? ApplicationConstants.FRONTEND_URL_ES6
            : ApplicationConstants.FRONTEND_URL_ES5;
    String fallback = es6
            ? ApplicationConstants.FRONTEND_URL_ES6_DEFAULT_VALUE
            : ApplicationConstants.FRONTEND_URL_ES5_DEFAULT_VALUE;
    String resolved = session.getConfiguration()
            .getApplicationOrSystemProperty(property, fallback);
    // Callers treat the value as a directory prefix, so normalize the
    // trailing slash.
    return resolved.endsWith("/") ? resolved : resolved + "/";
}
@Override
protected boolean canHandleRequest(VaadinRequest request) {
    // /APP requests are not bootstrap requests; by declining them here
    // they fall through and eventually produce a 404.
    boolean isAppRequest = ServletPortletHelper.isAppRequest(request);
    return !isAppRequest;
}
@Override
public boolean synchronizedHandleRequest(VaadinSession session,
VaadinRequest request, VaadinResponse response) throws IOException {
try {
List<UIProvider> uiProviders = session.getUIProviders();
UIClassSelectionEvent classSelectionEvent = new UIClassSelectionEvent(
request);
// Find UI provider and UI class
Class<? extends UI> uiClass = null;
UIProvider provider = null;
for (UIProvider p : uiProviders) {
uiClass = p.getUIClass(classSelectionEvent);
// If we found something
if (uiClass != null) {
provider = p;
break;
}
}
if (provider == null) {
// Can't generate bootstrap if no UI provider matches
return false;
}
// Build the bootstrap fragment, let the session listeners modify it,
// and write the resulting page to the response.
BootstrapFragmentResponse bootstrapResponse = new BootstrapFragmentResponse(
this, request, session, uiClass, new ArrayList<>(),
provider);
BootstrapContext context = new BootstrapContext(response,
bootstrapResponse);
bootstrapResponse.setUriResolver(context.getUriResolver());
setupMainDiv(context);
BootstrapFragmentResponse fragmentResponse = context
.getBootstrapResponse();
session.modifyBootstrapResponse(fragmentResponse);
String html = getBootstrapHtml(context);
writeBootstrapPage(response, html);
} catch (JsonException e) {
// Building the application parameters failed; report a 500.
writeError(response, e);
}
return true;
}
// Produces the bootstrap markup: a full standalone HTML document when the
// service serves the page itself, otherwise just the fragment nodes joined
// with newlines (for embedding, e.g. in a portal page).
private String getBootstrapHtml(BootstrapContext context) {
VaadinRequest request = context.getRequest();
VaadinResponse response = context.getResponse();
VaadinService vaadinService = request.getService();
BootstrapFragmentResponse fragmentResponse = context
.getBootstrapResponse();
if (vaadinService.isStandalone(request)) {
Map<String, Object> headers = new LinkedHashMap<>();
Document document = Document.createShell("");
BootstrapPageResponse pageResponse = new BootstrapPageResponse(this,
request, context.getSession(), context.getUIClass(),
document, headers, fragmentResponse.getUIProvider());
pageResponse.setUriResolver(context.getUriResolver());
// Move the already-built fragment nodes into the full document body.
List<Node> fragmentNodes = fragmentResponse.getFragmentNodes();
Element body = document.body();
for (Node node : fragmentNodes) {
body.appendChild(node);
}
setupStandaloneDocument(context, pageResponse);
context.getSession().modifyBootstrapResponse(pageResponse);
sendBootstrapHeaders(response, headers);
return document.outerHtml();
} else {
StringBuilder sb = new StringBuilder();
for (Node node : fragmentResponse.getFragmentNodes()) {
if (sb.length() != 0) {
sb.append('\n');
}
sb.append(node.outerHtml());
}
return sb.toString();
}
}
/**
 * Writes the collected bootstrap headers to the response. String values
 * become plain headers, Long values become date headers; any other value
 * type is a programming error.
 */
private void sendBootstrapHeaders(VaadinResponse response,
        Map<String, Object> headers) {
    for (Entry<String, Object> header : headers.entrySet()) {
        String name = header.getKey();
        Object value = header.getValue();
        if (value instanceof String) {
            response.setHeader(name, (String) value);
        } else if (value instanceof Long) {
            response.setDateHeader(name, (Long) value);
        } else {
            throw new RuntimeException(
                    "Unsupported header value: " + value);
        }
    }
}
// Writes the finished bootstrap HTML to the response as UTF-8 text/html.
// try-with-resources ensures the writer (and thus the output stream) is
// flushed and closed even if writing fails.
private void writeBootstrapPage(VaadinResponse response, String html)
throws IOException {
response.setContentType(
ApplicationConstants.CONTENT_TYPE_TEXT_HTML_UTF_8);
try (BufferedWriter writer = new BufferedWriter(
new OutputStreamWriter(response.getOutputStream(), UTF_8))) {
writer.append(html);
}
}
// Turns the bare document shell into a complete standalone bootstrap page:
// sets no-cache headers, the doctype, meta/viewport/title/favicon elements,
// and the UI class's declared dependencies (HTML imports, scripts,
// stylesheets) in the head.
private void setupStandaloneDocument(BootstrapContext context,
BootstrapPageResponse response) {
response.setHeader("Cache-Control", "no-cache");
response.setHeader("Pragma", "no-cache");
response.setDateHeader("Expires", 0);
Document document = response.getDocument();
DocumentType doctype = new DocumentType("html", "", "",
document.baseUri());
document.child(0).before(doctype);
Element head = document.head();
head.appendElement("meta").attr("http-equiv", "Content-Type").attr(
"content", ApplicationConstants.CONTENT_TYPE_TEXT_HTML_UTF_8);
// Force IE 11 to use IE 11 mode.
head.appendElement("meta").attr("http-equiv", "X-UA-Compatible")
.attr("content", "IE=11");
Class<? extends UI> uiClass = context.getUIClass();
// The viewport may come from a @Viewport value or be produced by a
// @ViewportGeneratorClass; declaring both on the same UI is an error.
String viewportContent = null;
Viewport viewportAnnotation = uiClass.getAnnotation(Viewport.class);
ViewportGeneratorClass viewportGeneratorClassAnnotation = uiClass
.getAnnotation(ViewportGeneratorClass.class);
if (viewportAnnotation != null
&& viewportGeneratorClassAnnotation != null) {
throw new IllegalStateException(uiClass.getCanonicalName()
+ " cannot be annotated with both @"
+ Viewport.class.getSimpleName() + " and @"
+ ViewportGeneratorClass.class.getSimpleName());
}
if (viewportAnnotation != null) {
viewportContent = viewportAnnotation.value();
} else if (viewportGeneratorClassAnnotation != null) {
Class<? extends ViewportGenerator> viewportGeneratorClass = viewportGeneratorClassAnnotation
.value();
try {
viewportContent = ReflectTools
.createInstance(viewportGeneratorClass)
.getViewport(context.getRequest());
} catch (Exception e) {
throw new RuntimeException(
"Error processing viewport generator "
+ viewportGeneratorClass.getCanonicalName(),
e);
}
}
if (viewportContent != null) {
head.appendElement("meta").attr("name", "viewport").attr("content",
viewportContent);
}
String title = response.getUIProvider().getPageTitle(
new UICreateEvent(context.getRequest(), context.getUIClass()));
if (title != null) {
head.appendElement("title").appendText(title);
}
head.appendElement("style").attr("type", "text/css")
.appendText("html, body {height:100%;margin:0;}");
// Add favicon links
String themeName = context.getThemeName();
if (themeName != null) {
String themeUri = getThemeUri(context, themeName);
head.appendElement("link").attr("rel", "shortcut icon")
.attr("type", "image/vnd.microsoft.icon")
.attr("href", themeUri + "/favicon.ico");
head.appendElement("link").attr("rel", "icon")
.attr("type", "image/vnd.microsoft.icon")
.attr("href", themeUri + "/favicon.ico");
}
// Emit one head element per declared dependency; unknown types are
// logged and skipped rather than failing the bootstrap.
Collection<? extends Dependency> deps = Dependency.findDependencies(
Collections.singletonList(uiClass),
context.getSession().getCommunicationManager(),
new FilterContext(context.getSession()));
for (Dependency dependency : deps) {
Type type = dependency.getType();
String url = context.getUriResolver()
.resolveVaadinUri(dependency.getUrl());
if (type == Type.HTMLIMPORT) {
head.appendElement("link").attr("rel", "import").attr("href",
url);
} else if (type == Type.JAVASCRIPT) {
head.appendElement("script").attr("type", "text/javascript")
.attr("src", url);
} else if (type == Type.STYLESHEET) {
head.appendElement("link").attr("rel", "stylesheet")
.attr("type", "text/css").attr("href", url);
} else {
getLogger().severe("Ignoring unknown dependency type "
+ dependency.getType());
}
}
Element body = document.body();
body.attr("scroll", "auto");
body.addClass(ApplicationConstants.GENERATED_BODY_CLASSNAME);
}
// Returns the java.util.logging logger for this class.
private static Logger getLogger() {
return Logger.getLogger(BootstrapHandler.class.getName());
}
/**
 * Override hook for providing an inline CSS style for the main div that
 * hosts the application (see {@code setupMainDiv}).
 *
 * @param context
 *            the bootstrap context
 * @return the style attribute value, or <code>null</code> (the default)
 *         for no inline style
 */
protected String getMainDivStyle(BootstrapContext context) {
return null;
}
/**
 * Resolves the widgetset to load for the UI being bootstrapped.
 * <p>
 * The UI provider is consulted first; if it does not specify a widgetset,
 * the one configured for the service is used.
 *
 * @param context
 *            the bootstrap context
 * @return the widgetset info, never <code>null</code>
 */
public WidgetsetInfo getWidgetsetForUI(BootstrapContext context) {
    VaadinRequest request = context.getRequest();
    // Use the local request consistently instead of re-fetching it from
    // the context.
    UICreateEvent event = new UICreateEvent(request, context.getUIClass());
    WidgetsetInfo widgetset = context.getBootstrapResponse().getUIProvider()
            .getWidgetsetInfo(event);
    if (widgetset == null) {
        // TODO do we want to move WidgetsetInfoImpl elsewhere?
        widgetset = new WidgetsetInfoImpl(
                request.getService().getConfiguredWidgetset(request));
    }
    return widgetset;
}
/**
 * Method to write the div element into which the actual Vaadin application
 * is rendered.
 * <p>
 * Override this method if you want to add some custom html around
 * the div element into which the actual Vaadin application will be
 * rendered.
 *
 * @param context
 *            the bootstrap context
 * @throws IOException
 *             if appending the main script contents fails
 */
private void setupMainDiv(BootstrapContext context) throws IOException {
String style = getMainDivStyle(context);
/*- Add classnames;
* .v-app
* .v-app-loading
*- Additionally added from javascript:
* <themeName, remove non-alphanum>
*/
List<Node> fragmentNodes = context.getBootstrapResponse()
.getFragmentNodes();
Element mainDiv = new Element(Tag.valueOf("div"), "");
mainDiv.attr("id", context.getAppId());
mainDiv.addClass("v-app");
mainDiv.addClass(context.getThemeName());
mainDiv.addClass(context.getUIClass().getSimpleName()
.toLowerCase(Locale.ROOT));
if (style != null && !style.isEmpty()) {
mainDiv.attr("style", style);
}
mainDiv.appendElement("div").addClass("v-app-loading");
mainDiv.appendElement("noscript").append(
"You have to enable javascript in your browser to use an application built with Vaadin.");
fragmentNodes.add(mainDiv);
VaadinRequest request = context.getRequest();
VaadinService vaadinService = request.getService();
String vaadinLocation = vaadinService.getStaticFileLocation(request)
+ "/VAADIN/";
// Parameter appended to JS to bypass caches after version upgrade.
String versionQueryParam = "?v=" + Version.getFullVersion();
if (context.getPushMode().isEnabled()) {
// Load client-side dependencies for push support
String pushJS = vaadinLocation;
if (context.getRequest().getService().getDeploymentConfiguration()
.isProductionMode()) {
pushJS += ApplicationConstants.VAADIN_PUSH_JS;
} else {
// Debug flavor of the push script outside production mode.
pushJS += ApplicationConstants.VAADIN_PUSH_DEBUG_JS;
}
pushJS += versionQueryParam;
fragmentNodes.add(new Element(Tag.valueOf("script"), "")
.attr("type", "text/javascript").attr("src", pushJS));
}
String bootstrapLocation = vaadinLocation
+ ApplicationConstants.VAADIN_BOOTSTRAP_JS + versionQueryParam;
fragmentNodes.add(new Element(Tag.valueOf("script"), "")
.attr("type", "text/javascript")
.attr("src", bootstrapLocation));
// Inline script that alerts if the bootstrap script failed to load and
// then starts the application with its JSON parameters.
Element mainScriptTag = new Element(Tag.valueOf("script"), "")
.attr("type", "text/javascript");
StringBuilder builder = new StringBuilder();
builder.append("//<![CDATA[\n");
builder.append("if (!window.vaadin) alert(" + JsonUtil.quote(
"Failed to load the bootstrap javascript: " + bootstrapLocation)
+ ");\n");
appendMainScriptTagContents(context, builder);
builder.append("//]]>");
mainScriptTag.appendChild(
new DataNode(builder.toString(), mainScriptTag.baseUri()));
fragmentNodes.add(mainScriptTag);
}
/**
 * Appends the JavaScript that starts the application to the main script
 * tag: optional GWT profiler hooks (outside production mode) followed by
 * the {@code vaadin.initApplication} call with the application parameters.
 *
 * @param context
 *            the bootstrap context
 * @param builder
 *            the builder accumulating the script contents
 * @throws IOException
 *             declared for subclasses; not thrown by this implementation
 */
protected void appendMainScriptTagContents(BootstrapContext context,
StringBuilder builder) throws IOException {
JsonObject appConfig = context.getApplicationParameters();
boolean isDebug = !context.getSession().getConfiguration()
.isProductionMode();
if (isDebug) {
/*
* Add tracking needed for getting bootstrap metrics to the client
* side Profiler if another implementation hasn't already been
* added.
*/
builder.append(
"if (typeof window.__gwtStatsEvent != 'function') {\n");
builder.append("vaadin.gwtStatsEvents = [];\n");
builder.append(
"window.__gwtStatsEvent = function(event) {vaadin.gwtStatsEvents.push(event); return true;};\n");
builder.append("}\n");
}
builder.append("vaadin.initApplication(\"");
builder.append(context.getAppId());
builder.append("\",");
appendJsonObject(builder, appConfig, isDebug);
builder.append(");\n");
}
/**
 * Serializes the given JSON object into the builder. In debug mode the
 * JSON is pretty-printed with a 4-space indent so the page source stays
 * readable; otherwise it is emitted compactly.
 */
private static void appendJsonObject(StringBuilder builder,
        JsonObject jsonObject, boolean isDebug) {
    String json = isDebug ? JsonUtil.stringify(jsonObject, 4)
            : JsonUtil.stringify(jsonObject);
    builder.append(json);
}
/**
 * Builds the JSON application parameters written into the bootstrap page
 * and consumed by the client-side {@code vaadin.initApplication} call:
 * theme, version info, widgetset, localized system messages, URLs and
 * deployment flags.
 *
 * @param context
 *            the bootstrap context
 * @return the application parameters object
 */
protected JsonObject getApplicationParameters(BootstrapContext context) {
    VaadinRequest request = context.getRequest();
    VaadinSession session = context.getSession();
    VaadinService vaadinService = request.getService();
    JsonObject appConfig = Json.createObject();
    String themeName = context.getThemeName();
    if (themeName != null) {
        appConfig.put("theme", themeName);
    }
    // Ignore restartApplication that might be passed to UI init
    if (request.getParameter(
            VaadinService.URL_PARAMETER_RESTART_APPLICATION) != null) {
        appConfig.put("extraParams", "&" + IGNORE_RESTART_PARAM + "=1");
    }
    JsonObject versionInfo = Json.createObject();
    versionInfo.put("vaadinVersion", Version.getFullVersion());
    String atmosphereVersion = AtmospherePushConnection
            .getAtmosphereVersion();
    if (atmosphereVersion != null) {
        versionInfo.put("atmosphereVersion", atmosphereVersion);
    }
    appConfig.put("versionInfo", versionInfo);
    WidgetsetInfo widgetsetInfo = context.getWidgetsetInfo();
    appConfig.put("widgetset", VaadinServlet
            .stripSpecialChars(widgetsetInfo.getWidgetsetName()));
    // add widgetset url if not null
    if (widgetsetInfo.getWidgetsetUrl() != null) {
        appConfig.put("widgetsetUrl", widgetsetInfo.getWidgetsetUrl());
    }
    appConfig.put("widgetsetReady", !widgetsetInfo.isCdn());
    // Use locale from session if set, else from the request.
    // Consistency fix: use the session/request locals declared above
    // instead of re-fetching them from the context.
    Locale locale = ServletPortletHelper.findLocale(null, session, request);
    // Get system messages
    SystemMessages systemMessages = vaadinService.getSystemMessages(locale,
            request);
    if (systemMessages != null) {
        // Write the CommunicationError -message to client
        JsonObject comErrMsg = Json.createObject();
        putValueOrNull(comErrMsg, "caption",
                systemMessages.getCommunicationErrorCaption());
        putValueOrNull(comErrMsg, "message",
                systemMessages.getCommunicationErrorMessage());
        putValueOrNull(comErrMsg, "url",
                systemMessages.getCommunicationErrorURL());
        appConfig.put("comErrMsg", comErrMsg);
        JsonObject authErrMsg = Json.createObject();
        putValueOrNull(authErrMsg, "caption",
                systemMessages.getAuthenticationErrorCaption());
        putValueOrNull(authErrMsg, "message",
                systemMessages.getAuthenticationErrorMessage());
        putValueOrNull(authErrMsg, "url",
                systemMessages.getAuthenticationErrorURL());
        appConfig.put("authErrMsg", authErrMsg);
        JsonObject sessExpMsg = Json.createObject();
        putValueOrNull(sessExpMsg, "caption",
                systemMessages.getSessionExpiredCaption());
        putValueOrNull(sessExpMsg, "message",
                systemMessages.getSessionExpiredMessage());
        putValueOrNull(sessExpMsg, "url",
                systemMessages.getSessionExpiredURL());
        appConfig.put("sessExpMsg", sessExpMsg);
    }
    appConfig.put(ApplicationConstants.CONTEXT_ROOT_URL,
            getContextRootPath(context));
    // getStaticFileLocation documented to never end with a slash
    // vaadinDir should always end with a slash
    String vaadinDir = vaadinService.getStaticFileLocation(request)
            + "/VAADIN/";
    appConfig.put(ApplicationConstants.VAADIN_DIR_URL, vaadinDir);
    appConfig.put(ApplicationConstants.FRONTEND_URL,
            context.getUriResolver().getFrontendUrl());
    if (!session.getConfiguration().isProductionMode()) {
        appConfig.put("debug", true);
    }
    if (vaadinService.isStandalone(request)) {
        appConfig.put("standalone", true);
    }
    appConfig.put("heartbeatInterval", vaadinService
            .getDeploymentConfiguration().getHeartbeatInterval());
    String serviceUrl = getServiceUrl(context);
    if (serviceUrl != null) {
        appConfig.put(ApplicationConstants.SERVICE_URL, serviceUrl);
    }
    boolean sendUrlsAsParameters = vaadinService
            .getDeploymentConfiguration().isSendUrlsAsParameters();
    if (!sendUrlsAsParameters) {
        appConfig.put("sendUrlsAsParameters", false);
    }
    return appConfig;
}
/**
 * Returns the context root path to report in the application parameters.
 *
 * @param context
 *            the bootstrap context
 * @return the context root path
 * @since 8.0.3
 */
protected abstract String getContextRootPath(BootstrapContext context);
/**
 * Returns the service URL to write into the application parameters, or
 * <code>null</code> to omit it (see {@code getApplicationParameters}).
 *
 * @param context
 *            the bootstrap context
 * @return the service URL, or <code>null</code> to omit it
 */
protected abstract String getServiceUrl(BootstrapContext context);
/**
 * Get the URI for the application theme.
 *
 * A portal-wide default theme is fetched from the portal shared resource
 * directory (if any), other themes from the portlet.
 *
 * @param context
 *            the bootstrap context
 * @param themeName
 *            the name of the theme
 * @return the URI of the named theme under the static file location
 */
public String getThemeUri(BootstrapContext context, String themeName) {
VaadinRequest request = context.getRequest();
final String staticFilePath = request.getService()
.getStaticFileLocation(request);
return staticFilePath + "/" + VaadinServlet.THEME_DIR_PATH + '/'
+ themeName;
}
/**
 * Override if required.
 *
 * Asks the UI provider for the theme of the UI being bootstrapped.
 *
 * @param context
 *            the bootstrap context
 * @return the theme name given by the UI provider, possibly
 *         <code>null</code>
 */
public String getThemeName(BootstrapContext context) {
UICreateEvent event = new UICreateEvent(context.getRequest(),
context.getUIClass());
return context.getBootstrapResponse().getUIProvider().getTheme(event);
}
/**
 * Do not override.
 *
 * Resolves the theme name ({@link #getThemeName(BootstrapContext)}, with
 * the service's configured theme as fallback) and strips special
 * characters from it.
 *
 * @param context
 *            the bootstrap context
 * @return the escaped theme name
 */
public String findAndEscapeThemeName(BootstrapContext context) {
String themeName = getThemeName(context);
if (themeName == null) {
VaadinRequest request = context.getRequest();
themeName = request.getService().getConfiguredTheme(request);
}
// XSS prevention, theme names shouldn't contain special chars anyway.
// The servlet denies them via url parameter.
themeName = VaadinServlet.stripSpecialChars(themeName);
return themeName;
}
/**
 * Reports a bootstrap failure to the client as an HTTP 500 response with
 * the throwable's localized message.
 *
 * @param response
 *            the response to write the error to
 * @param e
 *            the failure
 * @throws IOException
 *             if writing the error response fails
 */
protected void writeError(VaadinResponse response, Throwable e)
throws IOException {
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
e.getLocalizedMessage());
}
/**
 * Puts the given string value into the JSON object, writing a JSON null
 * when the value is absent so the key is always present in the output.
 *
 * @param object
 *            the target object, not <code>null</code>
 * @param key
 *            the key to write, not <code>null</code>
 * @param value
 *            the value to write, may be <code>null</code>
 */
private void putValueOrNull(JsonObject object, String key, String value) {
    assert object != null;
    assert key != null;
    if (value != null) {
        object.put(key, value);
    } else {
        object.put(key, Json.createNull());
    }
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ml.dataframe.evaluation.classification;
import org.apache.lucene.search.TotalHits;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationFields;
import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetric;
import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult;
import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationParameters;
import org.elasticsearch.xpack.core.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import static org.elasticsearch.test.hamcrest.OptionalMatchers.isEmpty;
import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresent;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.is;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Tests for the {@code Classification} evaluation: xContent/wire
 * serialization round-trips, constructor validation, search request
 * construction (including nested top_classes sub-fields) and multi-step
 * result processing across metrics.
 */
public class ClassificationTests extends AbstractSerializingTestCase<Classification> {
    // Upper bound on the number of buckets an evaluation may request per search.
    private static final EvaluationParameters EVALUATION_PARAMETERS = new EvaluationParameters(100);
    @Override
    protected NamedWriteableRegistry getNamedWriteableRegistry() {
        return new NamedWriteableRegistry(MlEvaluationNamedXContentProvider.getNamedWriteables());
    }
    @Override
    protected NamedXContentRegistry xContentRegistry() {
        return new NamedXContentRegistry(new MlEvaluationNamedXContentProvider().getNamedXContentParsers());
    }
    // Builds a Classification with random field names and a random subset of
    // the known metrics (null metrics list means "use defaults").
    public static Classification createRandom() {
        List<EvaluationMetric> metrics =
            randomSubsetOf(
                Arrays.asList(
                    AccuracyTests.createRandom(),
                    AucRocTests.createRandom(),
                    PrecisionTests.createRandom(),
                    RecallTests.createRandom(),
                    MulticlassConfusionMatrixTests.createRandom()));
        boolean usesAucRoc = metrics.stream().map(EvaluationMetric::getName).anyMatch(n -> AucRoc.NAME.getPreferredName().equals(n));
        return new Classification(
            randomAlphaOfLength(10),
            randomAlphaOfLength(10),
            // If AucRoc is to be calculated, the top_classes field is required
            (usesAucRoc || randomBoolean()) ? randomAlphaOfLength(10) : null,
            metrics.isEmpty() ? null : metrics);
    }
    @Override
    protected Classification doParseInstance(XContentParser parser) throws IOException {
        return Classification.fromXContent(parser);
    }
    @Override
    protected Classification createTestInstance() {
        return createRandom();
    }
    @Override
    protected Writeable.Reader<Classification> instanceReader() {
        return Classification::new;
    }
    // A metric that requires the predicted field must make the constructor
    // fail when that field is absent.
    public void testConstructor_GivenMissingField() {
        FakeClassificationMetric metric = new FakeClassificationMetric("fake");
        ElasticsearchStatusException e =
            expectThrows(
                ElasticsearchStatusException.class,
                () -> new Classification("foo", null, null, Collections.singletonList(metric)));
        assertThat(
            e.getMessage(),
            is(equalTo("[classification] must define [predicted_field] as required by the following metrics [fake]")));
    }
    // An explicitly empty metrics list (as opposed to null) is rejected.
    public void testConstructor_GivenEmptyMetrics() {
        ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class,
            () -> new Classification("foo", "bar", "results", Collections.emptyList()));
        assertThat(e.getMessage(), equalTo("[classification] must have one or more metrics"));
    }
    // Passing null metrics falls back to the default metric set.
    public void testConstructor_GivenDefaultMetrics() {
        Classification classification = new Classification("actual", "predicted", null, null);
        List<EvaluationMetric> metrics = classification.getMetrics();
        assertThat(metrics, containsInAnyOrder(new Accuracy(), new MulticlassConfusionMatrix(), new Precision(), new Recall()));
    }
    // Verifies the derived top_classes sub-field names ("<top>.class_name",
    // "<top>.class_probability") and the nested flag.
    public void testGetFields() {
        Classification evaluation = new Classification("foo", "bar", "results", null);
        EvaluationFields fields = evaluation.getFields();
        assertThat(fields.getActualField(), is(equalTo("foo")));
        assertThat(fields.getPredictedField(), is(equalTo("bar")));
        assertThat(fields.getTopClassesField(), is(equalTo("results")));
        assertThat(fields.getPredictedClassField(), is(equalTo("results.class_name")));
        assertThat(fields.getPredictedProbabilityField(), is(equalTo("results.class_probability")));
        assertThat(fields.isPredictedProbabilityFieldNested(), is(true));
    }
    // Metrics that don't need top_classes: only actual/predicted exists
    // filters are added around the user query. NOTE: the expected query is
    // compared with equalTo, so the filter order here is significant.
    public void testBuildSearch_WithDefaultNonRequiredNestedFields() {
        QueryBuilder userProvidedQuery =
            QueryBuilders.boolQuery()
                .filter(QueryBuilders.termQuery("field_A", "some-value"))
                .filter(QueryBuilders.termQuery("field_B", "some-other-value"));
        QueryBuilder expectedSearchQuery =
            QueryBuilders.boolQuery()
                .filter(QueryBuilders.existsQuery("act"))
                .filter(QueryBuilders.existsQuery("pred"))
                .filter(QueryBuilders.boolQuery()
                    .filter(QueryBuilders.termQuery("field_A", "some-value"))
                    .filter(QueryBuilders.termQuery("field_B", "some-other-value")));
        Classification evaluation = new Classification("act", "pred", null, Arrays.asList(new MulticlassConfusionMatrix()));
        SearchSourceBuilder searchSourceBuilder = evaluation.buildSearch(EVALUATION_PARAMETERS, userProvidedQuery);
        assertThat(searchSourceBuilder.query(), equalTo(expectedSearchQuery));
        assertThat(searchSourceBuilder.aggregations().count(), greaterThan(0));
    }
    // Same as above but with an explicit top_classes field ("results"):
    // non-required nested fields still produce no nested filters.
    public void testBuildSearch_WithExplicitNonRequiredNestedFields() {
        QueryBuilder userProvidedQuery =
            QueryBuilders.boolQuery()
                .filter(QueryBuilders.termQuery("field_A", "some-value"))
                .filter(QueryBuilders.termQuery("field_B", "some-other-value"));
        QueryBuilder expectedSearchQuery =
            QueryBuilders.boolQuery()
                .filter(QueryBuilders.existsQuery("act"))
                .filter(QueryBuilders.existsQuery("pred"))
                .filter(QueryBuilders.boolQuery()
                    .filter(QueryBuilders.termQuery("field_A", "some-value"))
                    .filter(QueryBuilders.termQuery("field_B", "some-other-value")));
        Classification evaluation = new Classification("act", "pred", "results", Arrays.asList(new MulticlassConfusionMatrix()));
        SearchSourceBuilder searchSourceBuilder = evaluation.buildSearch(EVALUATION_PARAMETERS, userProvidedQuery);
        assertThat(searchSourceBuilder.query(), equalTo(expectedSearchQuery));
        assertThat(searchSourceBuilder.aggregations().count(), greaterThan(0));
    }
    // AucRoc requires the nested top_classes sub-fields; with no explicit
    // top_classes field the default "ml.top_classes" prefix is used.
    public void testBuildSearch_WithDefaultRequiredNestedFields() {
        QueryBuilder userProvidedQuery =
            QueryBuilders.boolQuery()
                .filter(QueryBuilders.termQuery("field_A", "some-value"))
                .filter(QueryBuilders.termQuery("field_B", "some-other-value"));
        QueryBuilder expectedSearchQuery =
            QueryBuilders.boolQuery()
                .filter(QueryBuilders.existsQuery("act"))
                .filter(
                    QueryBuilders.nestedQuery("ml.top_classes", QueryBuilders.existsQuery("ml.top_classes.class_name"), ScoreMode.None)
                        .ignoreUnmapped(true))
                .filter(
                    QueryBuilders.nestedQuery(
                        "ml.top_classes", QueryBuilders.existsQuery("ml.top_classes.class_probability"), ScoreMode.None)
                        .ignoreUnmapped(true))
                .filter(QueryBuilders.boolQuery()
                    .filter(QueryBuilders.termQuery("field_A", "some-value"))
                    .filter(QueryBuilders.termQuery("field_B", "some-other-value")));
        Classification evaluation = new Classification("act", "pred", null, Arrays.asList(new AucRoc(false, "some-value")));
        SearchSourceBuilder searchSourceBuilder = evaluation.buildSearch(EVALUATION_PARAMETERS, userProvidedQuery);
        assertThat(searchSourceBuilder.query(), equalTo(expectedSearchQuery));
        assertThat(searchSourceBuilder.aggregations().count(), greaterThan(0));
    }
    // AucRoc with an explicit top_classes field: nested filters use that
    // field as the prefix instead of the default.
    public void testBuildSearch_WithExplicitRequiredNestedFields() {
        QueryBuilder userProvidedQuery =
            QueryBuilders.boolQuery()
                .filter(QueryBuilders.termQuery("field_A", "some-value"))
                .filter(QueryBuilders.termQuery("field_B", "some-other-value"));
        QueryBuilder expectedSearchQuery =
            QueryBuilders.boolQuery()
                .filter(QueryBuilders.existsQuery("act"))
                .filter(
                    QueryBuilders.nestedQuery("results", QueryBuilders.existsQuery("results.class_name"), ScoreMode.None)
                        .ignoreUnmapped(true))
                .filter(
                    QueryBuilders.nestedQuery("results", QueryBuilders.existsQuery("results.class_probability"), ScoreMode.None)
                        .ignoreUnmapped(true))
                .filter(QueryBuilders.boolQuery()
                    .filter(QueryBuilders.termQuery("field_A", "some-value"))
                    .filter(QueryBuilders.termQuery("field_B", "some-other-value")));
        Classification evaluation = new Classification("act", "pred", "results", Arrays.asList(new AucRoc(false, "some-value")));
        SearchSourceBuilder searchSourceBuilder = evaluation.buildSearch(EVALUATION_PARAMETERS, userProvidedQuery);
        assertThat(searchSourceBuilder.query(), equalTo(expectedSearchQuery));
        assertThat(searchSourceBuilder.aggregations().count(), greaterThan(0));
    }
    // Each process() call advances every unfinished metric by one step; a
    // metric's result appears once it reaches its configured step count, and
    // hasAllResults() flips to true only when every metric has a result.
    // Further process() calls after completion are no-ops.
    public void testProcess_MultipleMetricsWithDifferentNumberOfSteps() {
        EvaluationMetric metric1 = new FakeClassificationMetric("fake_metric_1", 2);
        EvaluationMetric metric2 = new FakeClassificationMetric("fake_metric_2", 3);
        EvaluationMetric metric3 = new FakeClassificationMetric("fake_metric_3", 4);
        EvaluationMetric metric4 = new FakeClassificationMetric("fake_metric_4", 5);
        Classification evaluation = new Classification("act", "pred", null, Arrays.asList(metric1, metric2, metric3, metric4));
        assertThat(metric1.getResult(), isEmpty());
        assertThat(metric2.getResult(), isEmpty());
        assertThat(metric3.getResult(), isEmpty());
        assertThat(metric4.getResult(), isEmpty());
        assertThat(evaluation.hasAllResults(), is(false));
        evaluation.process(mockSearchResponseWithNonZeroTotalHits());
        assertThat(metric1.getResult(), isEmpty());
        assertThat(metric2.getResult(), isEmpty());
        assertThat(metric3.getResult(), isEmpty());
        assertThat(metric4.getResult(), isEmpty());
        assertThat(evaluation.hasAllResults(), is(false));
        evaluation.process(mockSearchResponseWithNonZeroTotalHits());
        assertThat(metric1.getResult(), isPresent());
        assertThat(metric2.getResult(), isEmpty());
        assertThat(metric3.getResult(), isEmpty());
        assertThat(metric4.getResult(), isEmpty());
        assertThat(evaluation.hasAllResults(), is(false));
        evaluation.process(mockSearchResponseWithNonZeroTotalHits());
        assertThat(metric1.getResult(), isPresent());
        assertThat(metric2.getResult(), isPresent());
        assertThat(metric3.getResult(), isEmpty());
        assertThat(metric4.getResult(), isEmpty());
        assertThat(evaluation.hasAllResults(), is(false));
        evaluation.process(mockSearchResponseWithNonZeroTotalHits());
        assertThat(metric1.getResult(), isPresent());
        assertThat(metric2.getResult(), isPresent());
        assertThat(metric3.getResult(), isPresent());
        assertThat(metric4.getResult(), isEmpty());
        assertThat(evaluation.hasAllResults(), is(false));
        evaluation.process(mockSearchResponseWithNonZeroTotalHits());
        assertThat(metric1.getResult(), isPresent());
        assertThat(metric2.getResult(), isPresent());
        assertThat(metric3.getResult(), isPresent());
        assertThat(metric4.getResult(), isPresent());
        assertThat(evaluation.hasAllResults(), is(true));
        evaluation.process(mockSearchResponseWithNonZeroTotalHits());
        assertThat(metric1.getResult(), isPresent());
        assertThat(metric2.getResult(), isPresent());
        assertThat(metric3.getResult(), isPresent());
        assertThat(metric4.getResult(), isPresent());
        assertThat(evaluation.hasAllResults(), is(true));
    }
    // A response with 10 hits and no aggregations, enough to drive process().
    private static SearchResponse mockSearchResponseWithNonZeroTotalHits() {
        SearchResponse searchResponse = mock(SearchResponse.class);
        SearchHits hits = new SearchHits(SearchHits.EMPTY, new TotalHits(10, TotalHits.Relation.EQUAL_TO), 0);
        when(searchResponse.getHits()).thenReturn(hits);
        return searchResponse;
    }
    /**
     * Metric which iterates through its steps in {@link #process} method.
     * Number of steps is configurable.
     * Upon reaching the last step, the result is produced.
     */
    private static class FakeClassificationMetric implements EvaluationMetric {
        private final String name;
        // Number of process() calls needed before a result is produced.
        private final int numSteps;
        private int currentStepIndex;
        private EvaluationMetricResult result;
        FakeClassificationMetric(String name) {
            this(name, 1);
        }
        FakeClassificationMetric(String name, int numSteps) {
            this.name = name;
            this.numSteps = numSteps;
        }
        @Override
        public String getName() {
            return name;
        }
        @Override
        public String getWriteableName() {
            return name;
        }
        @Override
        public Set<String> getRequiredFields() {
            return Sets.newHashSet(EvaluationFields.ACTUAL_FIELD.getPreferredName(), EvaluationFields.PREDICTED_FIELD.getPreferredName());
        }
        @Override
        public Tuple<List<AggregationBuilder>, List<PipelineAggregationBuilder>> aggs(EvaluationParameters parameters,
                                                                                     EvaluationFields fields) {
            return Tuple.tuple(List.of(), List.of());
        }
        @Override
        public void process(Aggregations aggs) {
            // Once finished, further calls are ignored.
            if (result != null) {
                return;
            }
            currentStepIndex++;
            if (currentStepIndex == numSteps) {
                // This is the last step, time to write evaluation result
                result = mock(EvaluationMetricResult.class);
            }
        }
        @Override
        public Optional<EvaluationMetricResult> getResult() {
            return Optional.ofNullable(result);
        }
        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) {
            return builder;
        }
        @Override
        public void writeTo(StreamOutput out) {
        }
    }
}
| |
/* *******************************************************************************************************
Copyright (c) 2015 EXILANT Technologies Private Limited
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
******************************************************************************************************** */
package com.exilant.exility.core;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
/***
* manages messages
*
*/
public class Messages {
	/** header row used by the grid-returning methods */
	private static final String[] HEADER_ROW = { "name", "text", "severity" };

	/**
	 * input grid has less rows than the min rows expected
	 */
	public static final String EXIL_MIN_ROWS = "exilMinRows";

	/**
	 * input grid has more rows than the max rows expected
	 */
	// NOTE(review): the value "exilMixRows" looks like a typo for
	// "exilMaxRows", but message files refer to messages by this literal
	// name, so renaming it could break existing projects. Left unchanged -
	// confirm before fixing.
	public static final String EXIL_MAX_ROWS = "exilMixRows";

	// Fall-back texts for the built-in messages above. NOTE(review): this map
	// is only written to in this class; presumably read via reflection or a
	// future use - verify before removing.
	private static final Map<String, String> internalMessages = new HashMap<String, String>();

	/** singleton holding all loaded messages */
	private static Messages instance = new Messages();

	/** messages indexed by their name */
	Map<String, Message> messages = new HashMap<String, Message>();

	static {
		internalMessages.put(EXIL_MIN_ROWS,
				"A minimum of @2 rows expected in table @1");
		internalMessages.put(EXIL_MAX_ROWS,
				"A maximum of @2 rows expected in table @1");
	}

	static Messages getInstance() {
		return Messages.instance;
	}

	/**
	 * default
	 */
	public Messages() {
	}

	/**
	 * get a message
	 *
	 * @param messageName name of the message
	 * @return message with this name, or null if it is not there
	 */
	public static Message getMessage(String messageName) {
		return Messages.instance.messages.get(messageName);
	}

	/**
	 * get all messages. to be re-looked and see why is some one getting this.
	 *
	 * @return all messages
	 */
	public Map<String, Message> getMessages() {
		return this.messages;
	}

	/**
	 * set/replace a message
	 *
	 * @param message message to add or replace
	 */
	public static void setMessage(Message message) {
		// put() already replaces any existing mapping; no need to remove first
		Messages.instance.messages.put(message.name, message);
	}

	/**
	 * remove a message
	 *
	 * @param message message to remove
	 */
	public static void removeMessage(Message message) {
		Messages.instance.messages.remove(message.name);
	}

	/**
	 * get a message. If this is not found, a new one is created with ERROR
	 * severity. Whenever end users see this message, it is a reminder for
	 * project to add this message to messages file
	 *
	 * @param code message name
	 * @param parameters values to substitute into the message, may be null
	 * @return message, never null
	 */
	public static Message getMessage(String code, String[] parameters) {
		// single lookup instead of containsKey() followed by get()
		Message defined = Messages.instance.messages.get(code);
		if (defined != null) {
			return defined.getFormattedMessage(parameters);
		}
		// The message is not defined. Create an ad-hoc error message instead.
		Message messageToReturn = new Message();
		messageToReturn.name = code;
		messageToReturn.severity = MessageSeverity.ERROR;
		StringBuilder txt = new StringBuilder(code);
		if (parameters != null) {
			for (String parameter : parameters) {
				txt.append(" : ").append(parameter).append('\t');
			}
		}
		// Bug fix: text used to be assigned only when parameters != null,
		// leaving the returned message with a null text otherwise.
		messageToReturn.text = txt.toString();
		Spit.out("Message "
				+ code
				+ " is not defined. An error message with this name is created and added.");
		return messageToReturn;
	}

	/**
	 * re-load messages from the resource folder
	 *
	 * @param removeExistingMessages
	 *            if true, existing messages are discarded before loading
	 */
	static void reload(boolean removeExistingMessages) {
		if (removeExistingMessages || Messages.instance == null) {
			Messages.instance = new Messages();
		}
		Messages.load();
	}

	/**
	 * load messages from the "messages" resource file/folder into the
	 * singleton, then force the severity of the core exility error message
	 * back to ERROR (projects must not be able to downgrade it).
	 */
	static synchronized void load() {
		try {
			Map<String, Object> msgs = ResourceManager.loadFromFileOrFolder(
					"messages", "message", ".xml");
			// iterate values directly rather than keySet() + get()
			for (Object obj : msgs.values()) {
				if (obj instanceof Messages == false) {
					Spit.out("message folder contains an xml that is not messages. File ignored.");
					continue;
				}
				Messages.instance.copyFrom((Messages) obj);
			}
			Spit.out(Messages.instance.messages.size() + " messages loaded.");
		} catch (Exception e) {
			Spit.out("Unable to load messages. Error : " + e.getMessage());
			Spit.out(e);
		}
		/*
		 * we have some issue with projects over-riding severity of exility
		 * error. Since transaction processing depends on this, we have to
		 * over-ride these back to errors
		 */
		Message msg = instance.messages.get(Message.EXILITY_ERROR);
		if (msg != null) {
			msg.severity = MessageSeverity.ERROR;
		}
	}

	/**
	 * merge messages from another collection into this one, skipping (and
	 * reporting) duplicates
	 *
	 * @param msgs messages to copy from
	 */
	private void copyFrom(Messages msgs) {
		for (Message m : msgs.messages.values()) {
			if (this.messages.containsKey(m.name)) {
				Spit.out("Error : message " + m.name
						+ " is defined more than once");
				continue;
			}
			this.messages.put(m.name, m);
		}
	}

	/**
	 * get the text of a message, or a marker text when it is not defined
	 *
	 * @param code message name
	 * @return message text, never null
	 */
	static String getMessageText(String code) {
		Message message = Messages.instance.messages.get(code);
		if (message == null) {
			return code + " is not defined.";
		}
		return message.text;
	}

	/**
	 * get the severity of a message
	 *
	 * @param code message name
	 * @return severity, UNDEFINED if the message is not defined
	 */
	static MessageSeverity getSeverity(String code) {
		Message message = Messages.instance.messages.get(code);
		if (message == null) {
			return MessageSeverity.UNDEFINED;
		}
		return message.severity;
	}

	/**
	 * return all messages as a grid, with first row as header.
	 *
	 * @return all messages as an array of messages, with first row as header.
	 */
	public static String[][] getAllInGrid() {
		String[] names = Messages.instance.messages.keySet().toArray(
				new String[0]);
		Arrays.sort(names);
		return toGrid(names);
	}

	/**
	 * get messages that start with given string
	 *
	 * @param startngName prefix to match, case-insensitively
	 * @return matching list of messages as a grid with a header row
	 */
	public static String[][] getMatchingMessages(String startngName) {
		String stringToMatch = startngName.toLowerCase();
		ArrayList<String> matches = new ArrayList<String>();
		for (String name : Messages.instance.messages.keySet()) {
			if (name.toLowerCase().startsWith(stringToMatch)) {
				matches.add(name);
			}
		}
		String[] filteredNames = matches.toArray(new String[0]);
		Arrays.sort(filteredNames);
		return toGrid(filteredNames);
	}

	/**
	 * build a grid (header row + one row per message) for the given,
	 * already-sorted message names. Shared by getAllInGrid() and
	 * getMatchingMessages().
	 *
	 * @param sortedNames message names, each of which must exist
	 * @return grid of name/text/severity rows
	 */
	private static String[][] toGrid(String[] sortedNames) {
		String[][] rows = new String[sortedNames.length + 1][];
		rows[0] = Messages.HEADER_ROW;
		int i = 1;
		for (String aname : sortedNames) {
			Message msg = Messages.instance.messages.get(aname);
			rows[i] = new String[] { aname, msg.text, msg.severity.toString() };
			i++;
		}
		return rows;
	}

	/***
	 * get messages that are meant for client
	 *
	 * @return messages that are meant for client
	 */
	static Collection<Message> getClientMessages() {
		Collection<Message> msgs = new ArrayList<Message>();
		for (Message msg : Messages.instance.messages.values()) {
			if (msg.forClient) {
				msgs.add(msg);
			}
		}
		return msgs;
	}

	/**
	 * NOTE(review): method name "gelAllMessages" is a typo for
	 * "getAllMessages"; kept because it is public API.
	 *
	 * @return sorted array of message names
	 */
	public static String[] gelAllMessages() {
		String[] allOfThem = getInstance().messages.keySet().toArray(
				new String[0]);
		Arrays.sort(allOfThem);
		return allOfThem;
	}
}
| |
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.appspot.apprtc;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Point;
import android.media.AudioManager;
import android.os.Bundle;
import android.os.PowerManager;
import android.util.Log;
import android.webkit.JavascriptInterface;
import android.widget.EditText;
import android.widget.Toast;
import org.json.JSONException;
import org.json.JSONObject;
import org.webrtc.DataChannel;
import org.webrtc.IceCandidate;
import org.webrtc.MediaConstraints;
import org.webrtc.MediaStream;
import org.webrtc.PeerConnection;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.SdpObserver;
import org.webrtc.SessionDescription;
import org.webrtc.StatsObserver;
import org.webrtc.StatsReport;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoRenderer.I420Frame;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;
import java.util.LinkedList;
import java.util.List;
/**
* Main Activity of the AppRTCDemo Android app demonstrating interoperability
* between the Android/Java implementation of PeerConnection and the
* apprtc.appspot.com demo webapp.
*/
public class AppRTCDemoActivity extends Activity
implements AppRTCClient.IceServersObserver {
private static final String TAG = "AppRTCDemoActivity";
private PeerConnection pc;
private final PCObserver pcObserver = new PCObserver();
private final SDPObserver sdpObserver = new SDPObserver();
private final GAEChannelClient.MessageHandler gaeHandler = new GAEHandler();
private AppRTCClient appRtcClient = new AppRTCClient(this, gaeHandler, this);
private VideoStreamsView vsv;
private Toast logToast;
private LinkedList<IceCandidate> queuedRemoteCandidates =
new LinkedList<IceCandidate>();
// Synchronize on quit[0] to avoid teardown-related crashes.
private final Boolean[] quit = new Boolean[] { false };
private MediaConstraints sdpMediaConstraints;
private PowerManager.WakeLock wakeLock;
  /**
   * Activity entry point: installs a process-killing uncaught-exception
   * handler, acquires a bright-screen wake lock, sets up the video view and
   * audio routing, builds the SDP constraints, then either connects to the
   * room given in a VIEW intent or prompts the user for a room URL.
   */
  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Since the error-handling of this demo consists of throwing
    // RuntimeExceptions and we assume that'll terminate the app, we install
    // this default handler so it's applied to background threads as well.
    Thread.setDefaultUncaughtExceptionHandler(
        new Thread.UncaughtExceptionHandler() {
          public void uncaughtException(Thread t, Throwable e) {
            e.printStackTrace();
            System.exit(-1);
          }
        });
    // Keep the screen bright for the duration of the call.
    PowerManager powerManager = (PowerManager) getSystemService(POWER_SERVICE);
    wakeLock = powerManager.newWakeLock(
        PowerManager.SCREEN_BRIGHT_WAKE_LOCK, "AppRTCDemo");
    wakeLock.acquire();
    Point displaySize = new Point();
    getWindowManager().getDefaultDisplay().getSize(displaySize);
    vsv = new VideoStreamsView(this, displaySize);
    setContentView(vsv);
    abortUnless(PeerConnectionFactory.initializeAndroidGlobals(this),
        "Failed to initializeAndroidGlobals");
    // Use in-call mode (earpiece) when a wired headset is attached,
    // communication mode with speakerphone otherwise.
    AudioManager audioManager =
        ((AudioManager) getSystemService(AUDIO_SERVICE));
    audioManager.setMode(audioManager.isWiredHeadsetOn() ?
        AudioManager.MODE_IN_CALL : AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(!audioManager.isWiredHeadsetOn());
    // We want to receive both audio and video from the remote side.
    sdpMediaConstraints = new MediaConstraints();
    sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
        "OfferToReceiveAudio", "true"));
    sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
        "OfferToReceiveVideo", "true"));
    // A VIEW intent carries the room URL directly; otherwise ask the user.
    final Intent intent = getIntent();
    if ("android.intent.action.VIEW".equals(intent.getAction())) {
      connectToRoom(intent.getData().toString());
      return;
    }
    showGetRoomUI();
  }
  /**
   * Prompts the user for a room URL via a dialog and connects to it when the
   * positive button is pressed.
   */
  private void showGetRoomUI() {
    final EditText roomInput = new EditText(this);
    // Pre-fill the canonical apprtc URL and put the cursor at the end so the
    // user only needs to type the room name.
    roomInput.setText("https://apprtc.appspot.com/?r=");
    roomInput.setSelection(roomInput.getText().length());
    DialogInterface.OnClickListener listener =
        new DialogInterface.OnClickListener() {
          @Override public void onClick(DialogInterface dialog, int which) {
            // Only a positive button is installed below, so anything else is
            // a programming error.
            abortUnless(which == DialogInterface.BUTTON_POSITIVE, "lolwat?");
            dialog.dismiss();
            connectToRoom(roomInput.getText().toString());
          }
        };
    AlertDialog.Builder builder = new AlertDialog.Builder(this);
    builder
        .setMessage("Enter room URL").setView(roomInput)
        .setPositiveButton("Go!", listener).show();
  }
  /** Starts the signaling handshake for the given room URL. */
  private void connectToRoom(String roomUrl) {
    logAndToast("Connecting to room...");
    appRtcClient.connectToRoom(roomUrl);
  }
  /**
   * Pausing tears down the whole call and exits instead of suspending it;
   * see the TODO below for why resume is not supported.
   */
  @Override
  public void onPause() {
    super.onPause();
    vsv.onPause();
    // TODO(fischman): IWBN to support pause/resume, but the WebRTC codebase
    // isn't ready for that yet; e.g.
    // https://code.google.com/p/webrtc/issues/detail?id=1407
    // Instead, simply exit instead of pausing (the alternative leads to
    // system-borking with wedged cameras; e.g. b/8224551)
    disconnectAndExit();
  }
  /** Resumes the video view; never reached after a real pause (see onPause). */
  @Override
  public void onResume() {
    // The onResume() is a lie! See TODO(fischman) in onPause() above.
    super.onResume();
    vsv.onResume();
  }
  /**
   * Called once the signaling client has fetched the ICE server list.
   * Creates the PeerConnection, starts a periodic stats logger, captures
   * local audio/video and adds the local stream to the connection.
   *
   * @param iceServers STUN/TURN servers to hand to the PeerConnection
   */
  @Override
  public void onIceServers(List<PeerConnection.IceServer> iceServers) {
    PeerConnectionFactory factory = new PeerConnectionFactory();
    pc = factory.createPeerConnection(
        iceServers, appRtcClient.pcConstraints(), pcObserver);
    {
      // Log PeerConnection stats every 10 seconds until quit[0] is set.
      final PeerConnection finalPC = pc;
      final Runnable repeatedStatsLogger = new Runnable() {
        public void run() {
          // quit[0] doubles as the teardown flag and its own lock.
          synchronized (quit[0]) {
            if (quit[0]) {
              return;
            }
            final Runnable runnableThis = this;
            boolean success = finalPC.getStats(new StatsObserver() {
              public void onComplete(StatsReport[] reports) {
                for (StatsReport report : reports) {
                  Log.d(TAG, "Stats: " + report.toString());
                }
                // Re-schedule ourselves only after the previous report
                // completed.
                vsv.postDelayed(runnableThis, 10000);
              }
            }, null);
            if (!success) {
              throw new RuntimeException("getStats() return false!");
            }
          }
        }
      };
      vsv.postDelayed(repeatedStatsLogger, 10000);
    }
    {
      // Capture local video/audio and publish them on the connection.
      logAndToast("Creating local video source...");
      VideoCapturer capturer = getVideoCapturer();
      VideoSource videoSource = factory.createVideoSource(
          capturer, appRtcClient.videoConstraints());
      MediaStream lMS = factory.createLocalMediaStream("ARDAMS");
      VideoTrack videoTrack = factory.createVideoTrack("ARDAMSv0", videoSource);
      videoTrack.addRenderer(new VideoRenderer(new VideoCallbacks(
          vsv, VideoStreamsView.Endpoint.LOCAL)));
      lMS.addTrack(videoTrack);
      lMS.addTrack(factory.createAudioTrack("ARDAMSa0"));
      pc.addStream(lMS, new MediaConstraints());
    }
    logAndToast("Waiting for ICE candidates...");
  }
// Cycle through likely device names for the camera and return the first
// capturer that works, or crash if none do.
private VideoCapturer getVideoCapturer() {
String[] cameraFacing = { "front", "back" };
int[] cameraIndex = { 0, 1 };
int[] cameraOrientation = { 0, 90, 180, 270 };
for (String facing : cameraFacing) {
for (int index : cameraIndex) {
for (int orientation : cameraOrientation) {
String name = "Camera " + index + ", Facing " + facing +
", Orientation " + orientation;
VideoCapturer capturer = VideoCapturer.create(name);
if (capturer != null) {
logAndToast("Using camera: " + name);
return capturer;
}
}
}
}
throw new RuntimeException("Failed to open capturer");
}
  // NOTE(review): no explicit cleanup here; teardown (and presumably the
  // wakeLock release) appears to happen in disconnectAndExit(), invoked from
  // onPause() - confirm.
  @Override
  public void onDestroy() {
    super.onDestroy();
  }
// Poor-man's assert(): die with |msg| unless |condition| is true.
private static void abortUnless(boolean condition, String msg) {
if (!condition) {
throw new RuntimeException(msg);
}
}
  // Log |msg| and Toast about it. Cancels any previously shown toast so
  // messages replace one another instead of queuing up.
  private void logAndToast(String msg) {
    Log.d(TAG, msg);
    if (logToast != null) {
      logToast.cancel();
    }
    logToast = Toast.makeText(this, msg, Toast.LENGTH_SHORT);
    logToast.show();
  }
  // Send |json| to the underlying AppEngine Channel (serialized as a string).
  private void sendMessage(JSONObject json) {
    appRtcClient.sendMessage(json.toString());
  }
  // Put a |key|->|value| mapping in |json|. JSONException is checked;
  // rethrow unchecked since a failed put here is a programming error.
  private static void jsonPut(JSONObject json, String key, Object value) {
    try {
      json.put(key, value);
    } catch (JSONException e) {
      throw new RuntimeException(e);
    }
  }
  // Implementation detail: observe ICE & stream changes and react accordingly.
  // Every callback bounces its work onto the UI thread via runOnUiThread.
  private class PCObserver implements PeerConnection.Observer {
    // Forward each locally-gathered candidate to the peer over signaling.
    @Override public void onIceCandidate(final IceCandidate candidate){
      runOnUiThread(new Runnable() {
        public void run() {
          JSONObject json = new JSONObject();
          jsonPut(json, "type", "candidate");
          jsonPut(json, "label", candidate.sdpMLineIndex);
          jsonPut(json, "id", candidate.sdpMid);
          jsonPut(json, "candidate", candidate.sdp);
          sendMessage(json);
        }
      });
    }
    // Any PeerConnection error is fatal for this demo.
    @Override public void onError(){
      runOnUiThread(new Runnable() {
        public void run() {
          throw new RuntimeException("PeerConnection error!");
        }
      });
    }
    @Override public void onSignalingChange(
        PeerConnection.SignalingState newState) {
    }
    @Override public void onIceConnectionChange(
        PeerConnection.IceConnectionState newState) {
    }
    @Override public void onIceGatheringChange(
        PeerConnection.IceGatheringState newState) {
    }
    // Render the remote stream's single video track in the remote viewport.
    @Override public void onAddStream(final MediaStream stream){
      runOnUiThread(new Runnable() {
        public void run() {
          abortUnless(stream.audioTracks.size() == 1 &&
              stream.videoTracks.size() == 1,
              "Weird-looking stream: " + stream);
          stream.videoTracks.get(0).addRenderer(new VideoRenderer(
              new VideoCallbacks(vsv, VideoStreamsView.Endpoint.REMOTE)));
        }
      });
    }
    @Override public void onRemoveStream(final MediaStream stream){
      runOnUiThread(new Runnable() {
        public void run() {
          stream.videoTracks.get(0).dispose();
        }
      });
    }
    // AppRTC never negotiates data channels; receiving one is an error.
    @Override public void onDataChannel(final DataChannel dc) {
      runOnUiThread(new Runnable() {
        public void run() {
          throw new RuntimeException(
              "AppRTC doesn't use data channels, but got: " + dc.label() +
              " anyway!");
        }
      });
    }
  }
  // Implementation detail: handle offer creation/signaling and answer setting,
  // as well as adding remote ICE candidates once the answer SDP is set.
  // Every callback bounces its work onto the UI thread via runOnUiThread.
  private class SDPObserver implements SdpObserver {
    // A local offer/answer was created: signal it to the peer, then apply it
    // locally (which will re-enter this observer via onSetSuccess).
    @Override public void onCreateSuccess(final SessionDescription sdp) {
      runOnUiThread(new Runnable() {
        public void run() {
          logAndToast("Sending " + sdp.type);
          JSONObject json = new JSONObject();
          jsonPut(json, "type", sdp.type.canonicalForm());
          jsonPut(json, "sdp", sdp.description);
          sendMessage(json);
          pc.setLocalDescription(sdpObserver, sdp);
        }
      });
    }
    // Called after each successful set{Local,Remote}Description; which phase
    // of the handshake we're in is inferred from which descriptions are set.
    @Override public void onSetSuccess() {
      runOnUiThread(new Runnable() {
        public void run() {
          if (appRtcClient.isInitiator()) {
            if (pc.getRemoteDescription() != null) {
              // We've set our local offer and received & set the remote
              // answer, so drain candidates.
              drainRemoteCandidates();
            }
          } else {
            if (pc.getLocalDescription() == null) {
              // We just set the remote offer, time to create our answer.
              logAndToast("Creating answer");
              pc.createAnswer(SDPObserver.this, sdpMediaConstraints);
            } else {
              // Sent our answer and set it as local description; drain
              // candidates.
              drainRemoteCandidates();
            }
          }
        }
      });
    }
    @Override public void onCreateFailure(final String error) {
      runOnUiThread(new Runnable() {
        public void run() {
          throw new RuntimeException("createSDP error: " + error);
        }
      });
    }
    @Override public void onSetFailure(final String error) {
      runOnUiThread(new Runnable() {
        public void run() {
          throw new RuntimeException("setSDP error: " + error);
        }
      });
    }
    // Flush candidates queued before the remote description was set; nulling
    // the queue makes later candidates go straight to the PeerConnection.
    private void drainRemoteCandidates() {
      for (IceCandidate candidate : queuedRemoteCandidates) {
        pc.addIceCandidate(candidate);
      }
      queuedRemoteCandidates = null;
    }
  }
// Implementation detail: handler for receiving GAE messages and dispatching
// them appropriately.
private class GAEHandler implements GAEChannelClient.MessageHandler {
// Channel opened: only the initiator starts signaling, by creating an offer.
@JavascriptInterface public void onOpen() {
if (!appRtcClient.isInitiator()) {
return;
}
logAndToast("Creating offer...");
pc.createOffer(sdpObserver, sdpMediaConstraints);
}
// Dispatch an incoming signaling message by its JSON "type" field:
// "candidate" = remote ICE candidate, "answer"/"offer" = remote SDP,
// "bye" = remote hangup. Anything else is a protocol error.
@JavascriptInterface public void onMessage(String data) {
try {
JSONObject json = new JSONObject(data);
String type = (String) json.get("type");
if (type.equals("candidate")) {
IceCandidate candidate = new IceCandidate(
(String) json.get("id"),
json.getInt("label"),
(String) json.get("candidate"));
// Queue candidates while the queue exists (pre-answer); once
// drainRemoteCandidates() nulls it, add them directly.
if (queuedRemoteCandidates != null) {
queuedRemoteCandidates.add(candidate);
} else {
pc.addIceCandidate(candidate);
}
} else if (type.equals("answer") || type.equals("offer")) {
SessionDescription sdp = new SessionDescription(
SessionDescription.Type.fromCanonicalForm(type),
(String) json.get("sdp"));
pc.setRemoteDescription(sdpObserver, sdp);
} else if (type.equals("bye")) {
logAndToast("Remote end hung up; dropping PeerConnection");
disconnectAndExit();
} else {
throw new RuntimeException("Unexpected message: " + data);
}
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
// Channel closed by the server: tear the call down.
@JavascriptInterface public void onClose() {
disconnectAndExit();
}
// Channel error: tear the call down.
@JavascriptInterface public void onError(int code, String description) {
disconnectAndExit();
}
}
// Disconnect from remote resources, dispose of local resources, and exit.
// Idempotent teardown: the quit flag ensures concurrent callers (GAE channel
// callbacks, UI events) run the disconnect sequence exactly once.
private void disconnectAndExit() {
// Lock on the array itself, NOT on quit[0]: quit[0] is a boxed Boolean,
// i.e. a JVM-wide interned instance, and its identity changes when the
// slot is reassigned below — so it is not a safe monitor. The array
// reference is stable and private to this activity.
synchronized (quit) {
if (quit[0]) {
return;
}
quit[0] = true;
wakeLock.release();
if (pc != null) {
pc.dispose();
pc = null;
}
if (appRtcClient != null) {
appRtcClient.sendMessage("{\"type\": \"bye\"}");
appRtcClient.disconnect();
appRtcClient = null;
}
finish();
}
}
// Implementation detail: bridge the VideoRenderer.Callbacks interface to the
// VideoStreamsView implementation.
private class VideoCallbacks implements VideoRenderer.Callbacks {
// Target view that composites the video streams.
private final VideoStreamsView view;
// Which endpoint (LOCAL or REMOTE) this callback instance feeds.
private final VideoStreamsView.Endpoint stream;
public VideoCallbacks(
VideoStreamsView view, VideoStreamsView.Endpoint stream) {
this.view = view;
this.stream = stream;
}
// Frame-size change: forward to the view via queueEvent() so the update
// runs on the view's rendering thread.
@Override
public void setSize(final int width, final int height) {
view.queueEvent(new Runnable() {
public void run() {
view.setSize(stream, width, height);
}
});
}
// Deliver a decoded I420 frame to the view for rendering.
@Override
public void renderFrame(I420Frame frame) {
view.queueFrame(stream, frame);
}
}
}
| |
// Copyright 2009, 2010, 2012 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.test;
import com.thoughtworks.selenium.CommandProcessor;
import com.thoughtworks.selenium.DefaultSelenium;
import com.thoughtworks.selenium.HttpCommandProcessor;
import com.thoughtworks.selenium.Selenium;
import org.openqa.selenium.server.RemoteControlConfiguration;
import org.openqa.selenium.server.SeleniumServer;
import org.testng.Assert;
import org.testng.ITestContext;
import org.testng.annotations.*;
import org.testng.xml.XmlTest;
import java.io.File;
import java.lang.reflect.Method;
/**
* Base class for creating Selenium-based integration test cases. This class implements all the
* methods of {@link Selenium} and delegates to an instance (set up once per test by
* {@link #testStartup(org.testng.ITestContext, org.testng.xml.XmlTest)}).
*
* @since 5.2.0
*/
public abstract class SeleniumTestCase extends Assert implements Selenium
{
/**
 * Default page-load timeout passed to Selenium wait operations: 15 seconds,
 * expressed in milliseconds as Selenium requires.
 */
public static final String PAGE_LOAD_TIMEOUT = "15000";
// Symbolic names for the supported servlet containers; see createWebServer().
public static final String TOMCAT_6 = "tomcat6";
public static final String JETTY_7 = "jetty7";
/**
 * An XPath expression for locating a submit element (very commonly used
 * with {@link #clickAndWait(String)}).
 *
 * @since 5.3
 */
public static final String SUBMIT = "//input[@type='submit']";
/**
* The underlying {@link Selenium} instance that all the methods of this class delegate to;
* this can be useful when attempting to use SeleniumTestCase with a newer version of Selenium which
* has added some methods to the interface. This field will not be set until the test case instance
* has gone through its full initialization.
*
* @since 5.3
*/
protected Selenium selenium;
// Base URL of the application under test; assigned in setup() from the test context.
private String baseURL;
// Captures page markup (and output paths) on failure; assigned in setup().
private ErrorReporter errorReporter;
// The per-&lt;test&gt; TestNG context; assigned in setup(), cleared in cleanup().
private ITestContext testContext;
/**
* Starts up the servers for the entire test (i.e., for multiple TestCases). By placing <parameter> elements
* inside the appropriate <test> (of your testng.xml configuration
* file), you can change the configuration or behavior of the servers. It is common to have two
* or more identical tests that differ only in terms of the <code>tapestry.browser-start-command</code> parameter,
* to run tests against multiple browsers.
* <table>
* <tr>
* <th>Parameter</th>
* <th>Name</th>
* <th>Default</th>
* <th>Description</th>
* </tr>
* <tr>
* <td>container</td>
* <td>tapestry.servlet-container</td>
* <td>JETTY_7</td>
* <td>The Servlet container to use for the tests. Currently {@link #JETTY_7} or {@link #TOMCAT_6}</td>
* </tr>
* <tr>
* <td>webAppFolder</td>
* <td>tapestry.web-app-folder</td>
* <td>src/main/webapp</td>
* <td>Location of web application context</td>
* </tr>
* <tr>
* <td>contextPath</td>
* <td>tapestry.context-path</td>
* <td><em>empty string</em></td>
* <td>Context path (defaults to root). As elsewhere, the context path should be blank, or start with a slash (but
* not end with one).</td>
* </tr>
* <tr>
* <td>port</td>
* <td>tapestry.port</td>
* <td>9090</td>
* <td>Port number for web server to listen to</td>
* </tr>
* <tr>
* <td>sslPort</td>
* <td>tapestry.ssl-port</td>
* <td>8443</td>
* <td>Port number for web server to listen to for secure requests</td>
* </tr>
* <tr>
* <td>browserStartCommand</td>
* <td>tapestry.browser-start-command</td>
* <td>*firefox</td>
* <td>Command string used to launch the browser, as defined by Selenium</td>
* </tr>
* </table>
* <p/>
* Tests in the <em>beforeStartup</em> group will be run before the start of Selenium. This can be used to
* programmatically override the above parameter values.
* <p/>
* This method will be invoked in <em>each</em> subclass, but is set up to only startup the servers once (it checks
* the {@link ITestContext} to see if the necessary keys are already present).
*
* @param testContext Used to share objects between the launcher and the test suites
* @throws Exception
*/
// Launches, once per <test>, the servlet container, the Selenium RC server and a
// Selenium client, and stores them (plus a shutdown hook) in the ITestContext so
// every SeleniumTestCase subclass in the test shares them. See the class javadoc
// table for the supported parameters.
@BeforeTest(dependsOnGroups =
{"beforeStartup"})
public void testStartup(final ITestContext testContext, XmlTest xmlTest) throws Exception
{
// This is not actually necessary, because TestNG will only invoke this method once
// even when multiple test cases within the test extend from SeleniumTestCase. TestNG
// just invokes it on the "first" TestCase instance it has test methods for.
if (testContext.getAttribute(TapestryTestConstants.SHUTDOWN_ATTRIBUTE) != null)
{
return;
}
// If a parameter is overridden in another test method, TestNG won't pass the
// updated value via a parameter, but still passes the original (coming from testng.xml or the default).
// Seems like a TestNG bug.
// Map<String, String> testParameters = xmlTest.getParameters();
TapestryTestConfiguration annotation = this.getClass().getAnnotation(TapestryTestConfiguration.class);
if (annotation == null)
{
// No annotation on the subclass: borrow the defaults from an annotated
// throwaway class so the lookups below never hit a null annotation.
@TapestryTestConfiguration
final class EmptyInnerClass
{
}
annotation = EmptyInnerClass.class.getAnnotation(TapestryTestConfiguration.class);
}
// Resolve each setting: testng.xml parameter wins, annotation value is the fallback.
String webAppFolder = getParameter(xmlTest, TapestryTestConstants.WEB_APP_FOLDER_PARAMETER,
annotation.webAppFolder());
String container = getParameter(xmlTest, TapestryTestConstants.SERVLET_CONTAINER_PARAMETER,
annotation.container());
String contextPath = getParameter(xmlTest, TapestryTestConstants.CONTEXT_PATH_PARAMETER,
annotation.contextPath());
int port = getIntParameter(xmlTest, TapestryTestConstants.PORT_PARAMETER, annotation.port());
int sslPort = getIntParameter(xmlTest, TapestryTestConstants.SSL_PORT_PARAMETER, annotation.sslPort());
String browserStartCommand = getParameter(xmlTest, TapestryTestConstants.BROWSER_START_COMMAND_PARAMETER,
annotation.browserStartCommand());
String baseURL = String.format("http://localhost:%d%s/", port, contextPath);
System.err.println("Starting SeleniumTestCase:");
System.err.println("    currentDir: " + System.getProperty("user.dir"));
System.err.println("  webAppFolder: " + webAppFolder);
System.err.println("     container: " + container);
System.err.println("   contextPath: " + contextPath);
System.err.printf("         ports: %d / %d%n", port, sslPort);
System.err.println("  browserStart: " + browserStartCommand);
System.err.println("       baseURL: " + baseURL);
// Launch the servlet container first; keep its stop hook for the shutdown Runnable.
final Runnable stopWebServer = launchWebServer(container, webAppFolder, contextPath, port, sslPort);
final SeleniumServer seleniumServer = new SeleniumServer();
// Use a checked-in Firefox profile template when the module provides one.
File ffProfileTemplate = new File(TapestryTestConstants.MODULE_BASE_DIR, "src/test/conf/ff_profile_template");
if (ffProfileTemplate.isDirectory())
{
seleniumServer.getConfiguration().setFirefoxProfileTemplate(ffProfileTemplate);
}
seleniumServer.start();
CommandProcessor httpCommandProcessor = new HttpCommandProcessor("localhost",
RemoteControlConfiguration.DEFAULT_PORT, browserStartCommand, baseURL);
final ErrorReporterImpl errorReporter = new ErrorReporterImpl(httpCommandProcessor, testContext);
// Wrap the command processor so every failed command also produces an error report.
ErrorReportingCommandProcessor commandProcessor = new ErrorReportingCommandProcessor(httpCommandProcessor,
errorReporter);
final Selenium selenium = new DefaultSelenium(commandProcessor);
selenium.start();
testContext.setAttribute(TapestryTestConstants.BASE_URL_ATTRIBUTE, baseURL);
testContext.setAttribute(TapestryTestConstants.SELENIUM_ATTRIBUTE, selenium);
testContext.setAttribute(TapestryTestConstants.ERROR_REPORTER_ATTRIBUTE, errorReporter);
testContext.setAttribute(TapestryTestConstants.COMMAND_PROCESSOR_ATTRIBUTE, commandProcessor);
// The shutdown hook is stored LAST: testShutdown() relies on its presence as
// proof that startup completed.
testContext.setAttribute(TapestryTestConstants.SHUTDOWN_ATTRIBUTE, new Runnable()
{
public void run()
{
try
{
selenium.stop();
seleniumServer.stop();
stopWebServer.run();
// Output, at the end of the Test, any html capture or screen shots (this makes it much easier
// to locate them at the end of the run; there's such a variance on where they end up based
// on whether the tests are running from inside an IDE or via one of the command line
// builds.
errorReporter.writeOutputPaths();
} finally
{
testContext.removeAttribute(TapestryTestConstants.BASE_URL_ATTRIBUTE);
testContext.removeAttribute(TapestryTestConstants.SELENIUM_ATTRIBUTE);
testContext.removeAttribute(TapestryTestConstants.ERROR_REPORTER_ATTRIBUTE);
testContext.removeAttribute(TapestryTestConstants.COMMAND_PROCESSOR_ATTRIBUTE);
testContext.removeAttribute(TapestryTestConstants.SHUTDOWN_ATTRIBUTE);
}
}
});
}
/**
 * Reads a parameter from the TestNG configuration, returning the supplied
 * default when the parameter is not defined.
 */
private final String getParameter(XmlTest xmlTest, String key, String defaultValue)
{
    String configured = xmlTest.getParameter(key);
    if (configured == null)
    {
        return defaultValue;
    }
    return configured;
}
/**
 * Reads a parameter from the TestNG configuration and converts it to an int,
 * returning the supplied default when the parameter is not defined.
 */
private final int getIntParameter(XmlTest xmlTest, String key, int defaultValue)
{
    String configured = xmlTest.getParameter(key);
    return configured == null ? defaultValue : Integer.parseInt(configured);
}
/**
* Like {@link #testStartup(org.testng.ITestContext, org.testng.xml.XmlTest)} , this may
* be called multiple times against multiple instances, but only does work the first time.
*/
/**
 * Like {@link #testStartup(org.testng.ITestContext, org.testng.xml.XmlTest)}, this may be
 * invoked against multiple instances, but only does work the first time: the shutdown hook
 * is the last attribute testStartup() stores, so its absence means there is nothing to do.
 */
@AfterTest
public void testShutdown(ITestContext context)
{
    Runnable shutdownHook = (Runnable) context.getAttribute(TapestryTestConstants.SHUTDOWN_ATTRIBUTE);
    if (shutdownHook == null)
    {
        return;
    }
    shutdownHook.run();
}
/**
* Invoked from {@link #testStartup(org.testng.ITestContext, org.testng.xml.XmlTest)} to launch the web
* server to be tested. The return value is a Runnable that can be invoked later to cleanly shut down the launched
* server at the end of the test.
*
* @param container identifies which web server should be launched
* @param webAppFolder path to the web application context
* @param contextPath the path the context is mapped to, usually the empty string
* @param port the port number the server should handle
* @param sslPort the port number on which the server should handle secure requests
* @return Runnable used to shut down the server
* @throws Exception
*/
/**
 * Invoked from {@link #testStartup(org.testng.ITestContext, org.testng.xml.XmlTest)} to launch
 * the web server under test. The returned Runnable cleanly shuts the launched server down at
 * the end of the test.
 *
 * @param container    identifies which web server should be launched
 * @param webAppFolder path to the web application context
 * @param contextPath  the path the context is mapped to, usually the empty string
 * @param port         the port number the server should handle
 * @param sslPort      the port number on which the server should handle secure requests
 * @return Runnable used to shut down the server
 * @throws Exception
 */
protected Runnable launchWebServer(String container, String webAppFolder, String contextPath, int port, int sslPort)
        throws Exception
{
    final ServletContainerRunner containerRunner = createWebServer(container, webAppFolder, contextPath, port, sslPort);
    // Defer the actual stop to test shutdown time.
    return new Runnable()
    {
        public void run()
        {
            containerRunner.stop();
        }
    };
}
// Maps the symbolic container name onto the matching runner implementation,
// failing fast on an unrecognized name.
private ServletContainerRunner createWebServer(String container, String webAppFolder, String contextPath, int port, int sslPort) throws Exception
{
    if (JETTY_7.equals(container))
    {
        return new Jetty7Runner(webAppFolder, contextPath, port, sslPort);
    }
    if (TOMCAT_6.equals(container))
    {
        return new Tomcat6Runner(webAppFolder, contextPath, port, sslPort);
    }
    throw new RuntimeException("Unknown servlet container: " + container);
}
// Before any test method in the class runs, pull the shared fixtures stored by
// testStartup() out of the TestNG context into per-class fields.
@BeforeClass
public void setup(ITestContext context)
{
this.testContext = context;
selenium = (Selenium) context.getAttribute(TapestryTestConstants.SELENIUM_ATTRIBUTE);
baseURL = (String) context.getAttribute(TapestryTestConstants.BASE_URL_ATTRIBUTE);
errorReporter = (ErrorReporter) context.getAttribute(TapestryTestConstants.ERROR_REPORTER_ATTRIBUTE);
}
// After the class' tests complete, drop every reference captured in setup()
// so the shared fixtures are not pinned by this instance.
@AfterClass
public void cleanup()
{
    errorReporter = null;
    testContext = null;
    baseURL = null;
    selenium = null;
}
/**
* Delegates to {@link ErrorReporter#writeErrorReport()} to capture the current page markup in a
* file for later analysis.
*/
protected void writeErrorReport()
{
errorReporter.writeErrorReport();
}
/**
 * Returns the base URL for the application. This is typically <code>http://localhost:9999/</code>
 * (i.e., it includes a trailing slash).
 * <p/>
 * Generally, you should use {@link #openLinks(String...)} to start from your application's home page.
 */
public String getBaseURL()
{
return baseURL;
}
// Before each test method: publish the executing Method to the TestNG context
// (consumed by the error reporter) and label the Selenium log with a readable
// "ClassName: test name" context string.
@BeforeMethod
public void indicateTestMethodName(Method testMethod)
{
    testContext.setAttribute(TapestryTestConstants.CURRENT_TEST_METHOD_ATTRIBUTE, testMethod);
    String simpleClassName = testMethod.getDeclaringClass().getSimpleName();
    String readableName = testMethod.getName().replace("_", " ");
    selenium.setContext(simpleClassName + ": " + readableName);
}
// After each test method: clear the current-method marker stored by
// indicateTestMethodName() so stale data cannot leak into the next test.
@AfterMethod
public void cleanupTestMethod()
{
testContext.setAttribute(TapestryTestConstants.CURRENT_TEST_METHOD_ATTRIBUTE, null);
}
// ---------------------------------------------------------------------
// Start of delegate methods
//
// When upgrading to a new version of Selenium, it is probably easiest
// to delete all these methods and use the Generate Delegate Methods
// refactoring.
// ---------------------------------------------------------------------
public void addCustomRequestHeader(String key, String value)
{
selenium.addCustomRequestHeader(key, value);
}
public void addLocationStrategy(String strategyName, String functionDefinition)
{
selenium.addLocationStrategy(strategyName, functionDefinition);
}
public void addScript(String scriptContent, String scriptTagId)
{
selenium.addScript(scriptContent, scriptTagId);
}
public void addSelection(String locator, String optionLocator)
{
selenium.addSelection(locator, optionLocator);
}
public void allowNativeXpath(String allow)
{
selenium.allowNativeXpath(allow);
}
public void altKeyDown()
{
selenium.altKeyDown();
}
public void altKeyUp()
{
selenium.altKeyUp();
}
public void answerOnNextPrompt(String answer)
{
selenium.answerOnNextPrompt(answer);
}
public void assignId(String locator, String identifier)
{
selenium.assignId(locator, identifier);
}
public void attachFile(String fieldLocator, String fileLocator)
{
selenium.attachFile(fieldLocator, fileLocator);
}
public void captureEntirePageScreenshot(String filename, String kwargs)
{
selenium.captureEntirePageScreenshot(filename, kwargs);
}
public String captureEntirePageScreenshotToString(String kwargs)
{
return selenium.captureEntirePageScreenshotToString(kwargs);
}
public String captureNetworkTraffic(String type)
{
return selenium.captureNetworkTraffic(type);
}
public void captureScreenshot(String filename)
{
selenium.captureScreenshot(filename);
}
public String captureScreenshotToString()
{
return selenium.captureScreenshotToString();
}
public void check(String locator)
{
selenium.check(locator);
}
public void chooseCancelOnNextConfirmation()
{
selenium.chooseCancelOnNextConfirmation();
}
public void chooseOkOnNextConfirmation()
{
selenium.chooseOkOnNextConfirmation();
}
public void click(String locator)
{
selenium.click(locator);
}
public void clickAt(String locator, String coordString)
{
selenium.clickAt(locator, coordString);
}
public void close()
{
selenium.close();
}
public void contextMenu(String locator)
{
selenium.contextMenu(locator);
}
public void contextMenuAt(String locator, String coordString)
{
selenium.contextMenuAt(locator, coordString);
}
public void controlKeyDown()
{
selenium.controlKeyDown();
}
public void controlKeyUp()
{
selenium.controlKeyUp();
}
public void createCookie(String nameValuePair, String optionsString)
{
selenium.createCookie(nameValuePair, optionsString);
}
public void deleteAllVisibleCookies()
{
selenium.deleteAllVisibleCookies();
}
public void deleteCookie(String name, String optionsString)
{
selenium.deleteCookie(name, optionsString);
}
public void deselectPopUp()
{
selenium.deselectPopUp();
}
public void doubleClick(String locator)
{
selenium.doubleClick(locator);
}
public void doubleClickAt(String locator, String coordString)
{
selenium.doubleClickAt(locator, coordString);
}
public void dragAndDrop(String locator, String movementsString)
{
selenium.dragAndDrop(locator, movementsString);
}
public void dragAndDropToObject(String locatorOfObjectToBeDragged, String locatorOfDragDestinationObject)
{
selenium.dragAndDropToObject(locatorOfObjectToBeDragged, locatorOfDragDestinationObject);
}
public void dragdrop(String locator, String movementsString)
{
selenium.dragdrop(locator, movementsString);
}
public void fireEvent(String locator, String eventName)
{
selenium.fireEvent(locator, eventName);
}
public void focus(String locator)
{
selenium.focus(locator);
}
public String getAlert()
{
return selenium.getAlert();
}
public String[] getAllButtons()
{
return selenium.getAllButtons();
}
public String[] getAllFields()
{
return selenium.getAllFields();
}
public String[] getAllLinks()
{
return selenium.getAllLinks();
}
public String[] getAllWindowIds()
{
return selenium.getAllWindowIds();
}
public String[] getAllWindowNames()
{
return selenium.getAllWindowNames();
}
public String[] getAllWindowTitles()
{
return selenium.getAllWindowTitles();
}
public String getAttribute(String attributeLocator)
{
return selenium.getAttribute(attributeLocator);
}
public String[] getAttributeFromAllWindows(String attributeName)
{
return selenium.getAttributeFromAllWindows(attributeName);
}
public String getBodyText()
{
return selenium.getBodyText();
}
public String getConfirmation()
{
return selenium.getConfirmation();
}
public String getCookie()
{
return selenium.getCookie();
}
public String getCookieByName(String name)
{
return selenium.getCookieByName(name);
}
public Number getCursorPosition(String locator)
{
return selenium.getCursorPosition(locator);
}
public Number getElementHeight(String locator)
{
return selenium.getElementHeight(locator);
}
public Number getElementIndex(String locator)
{
return selenium.getElementIndex(locator);
}
public Number getElementPositionLeft(String locator)
{
return selenium.getElementPositionLeft(locator);
}
public Number getElementPositionTop(String locator)
{
return selenium.getElementPositionTop(locator);
}
public Number getElementWidth(String locator)
{
return selenium.getElementWidth(locator);
}
public String getEval(String script)
{
return selenium.getEval(script);
}
public String getExpression(String expression)
{
return selenium.getExpression(expression);
}
public String getHtmlSource()
{
return selenium.getHtmlSource();
}
public String getLocation()
{
return selenium.getLocation();
}
public String getLog()
{
return selenium.getLog();
}
public Number getMouseSpeed()
{
return selenium.getMouseSpeed();
}
public String getPrompt()
{
return selenium.getPrompt();
}
public String getSelectedId(String selectLocator)
{
return selenium.getSelectedId(selectLocator);
}
public String[] getSelectedIds(String selectLocator)
{
return selenium.getSelectedIds(selectLocator);
}
public String getSelectedIndex(String selectLocator)
{
return selenium.getSelectedIndex(selectLocator);
}
public String[] getSelectedIndexes(String selectLocator)
{
return selenium.getSelectedIndexes(selectLocator);
}
public String getSelectedLabel(String selectLocator)
{
return selenium.getSelectedLabel(selectLocator);
}
public String[] getSelectedLabels(String selectLocator)
{
return selenium.getSelectedLabels(selectLocator);
}
public String getSelectedValue(String selectLocator)
{
return selenium.getSelectedValue(selectLocator);
}
public String[] getSelectedValues(String selectLocator)
{
return selenium.getSelectedValues(selectLocator);
}
public String[] getSelectOptions(String selectLocator)
{
return selenium.getSelectOptions(selectLocator);
}
public String getSpeed()
{
return selenium.getSpeed();
}
public String getTable(String tableCellAddress)
{
return selenium.getTable(tableCellAddress);
}
public String getText(String locator)
{
return selenium.getText(locator);
}
public String getTitle()
{
return selenium.getTitle();
}
public String getValue(String locator)
{
return selenium.getValue(locator);
}
public boolean getWhetherThisFrameMatchFrameExpression(String currentFrameString, String target)
{
return selenium.getWhetherThisFrameMatchFrameExpression(currentFrameString, target);
}
public boolean getWhetherThisWindowMatchWindowExpression(String currentWindowString, String target)
{
return selenium.getWhetherThisWindowMatchWindowExpression(currentWindowString, target);
}
public Number getXpathCount(String xpath)
{
return selenium.getXpathCount(xpath);
}
public void goBack()
{
selenium.goBack();
}
public void highlight(String locator)
{
selenium.highlight(locator);
}
public void ignoreAttributesWithoutValue(String ignore)
{
selenium.ignoreAttributesWithoutValue(ignore);
}
public boolean isAlertPresent()
{
return selenium.isAlertPresent();
}
public boolean isChecked(String locator)
{
return selenium.isChecked(locator);
}
public boolean isConfirmationPresent()
{
return selenium.isConfirmationPresent();
}
public boolean isCookiePresent(String name)
{
return selenium.isCookiePresent(name);
}
public boolean isEditable(String locator)
{
return selenium.isEditable(locator);
}
public boolean isElementPresent(String locator)
{
return selenium.isElementPresent(locator);
}
public boolean isOrdered(String locator1, String locator2)
{
return selenium.isOrdered(locator1, locator2);
}
public boolean isPromptPresent()
{
return selenium.isPromptPresent();
}
public boolean isSomethingSelected(String selectLocator)
{
return selenium.isSomethingSelected(selectLocator);
}
public boolean isTextPresent(String pattern)
{
return selenium.isTextPresent(pattern);
}
public boolean isVisible(String locator)
{
return selenium.isVisible(locator);
}
public void keyDown(String locator, String keySequence)
{
selenium.keyDown(locator, keySequence);
}
public void keyDownNative(String keycode)
{
selenium.keyDownNative(keycode);
}
public void keyPress(String locator, String keySequence)
{
selenium.keyPress(locator, keySequence);
}
public void keyPressNative(String keycode)
{
selenium.keyPressNative(keycode);
}
public void keyUp(String locator, String keySequence)
{
selenium.keyUp(locator, keySequence);
}
public void keyUpNative(String keycode)
{
selenium.keyUpNative(keycode);
}
public void metaKeyDown()
{
selenium.metaKeyDown();
}
public void metaKeyUp()
{
selenium.metaKeyUp();
}
public void mouseDown(String locator)
{
selenium.mouseDown(locator);
}
public void mouseDownAt(String locator, String coordString)
{
selenium.mouseDownAt(locator, coordString);
}
public void mouseDownRight(String locator)
{
selenium.mouseDownRight(locator);
}
public void mouseDownRightAt(String locator, String coordString)
{
selenium.mouseDownRightAt(locator, coordString);
}
public void mouseMove(String locator)
{
selenium.mouseMove(locator);
}
public void mouseMoveAt(String locator, String coordString)
{
selenium.mouseMoveAt(locator, coordString);
}
public void mouseOut(String locator)
{
selenium.mouseOut(locator);
}
public void mouseOver(String locator)
{
selenium.mouseOver(locator);
}
public void mouseUp(String locator)
{
selenium.mouseUp(locator);
}
public void mouseUpAt(String locator, String coordString)
{
selenium.mouseUpAt(locator, coordString);
}
public void mouseUpRight(String locator)
{
selenium.mouseUpRight(locator);
}
public void mouseUpRightAt(String locator, String coordString)
{
selenium.mouseUpRightAt(locator, coordString);
}
public void open(String url)
{
selenium.open(url);
}
public void open(String url, String ignoreResponseCode)
{
selenium.open(url, ignoreResponseCode);
}
public void openWindow(String url, String windowID)
{
selenium.openWindow(url, windowID);
}
public void refresh()
{
selenium.refresh();
}
public void removeAllSelections(String locator)
{
selenium.removeAllSelections(locator);
}
public void removeScript(String scriptTagId)
{
selenium.removeScript(scriptTagId);
}
public void removeSelection(String locator, String optionLocator)
{
selenium.removeSelection(locator, optionLocator);
}
public String retrieveLastRemoteControlLogs()
{
return selenium.retrieveLastRemoteControlLogs();
}
public void rollup(String rollupName, String kwargs)
{
selenium.rollup(rollupName, kwargs);
}
public void runScript(String script)
{
selenium.runScript(script);
}
public void select(String selectLocator, String optionLocator)
{
selenium.select(selectLocator, optionLocator);
}
public void selectFrame(String locator)
{
selenium.selectFrame(locator);
}
public void selectPopUp(String windowID)
{
selenium.selectPopUp(windowID);
}
public void selectWindow(String windowID)
{
selenium.selectWindow(windowID);
}
public void setBrowserLogLevel(String logLevel)
{
selenium.setBrowserLogLevel(logLevel);
}
public void setContext(String context)
{
selenium.setContext(context);
}
public void setCursorPosition(String locator, String position)
{
selenium.setCursorPosition(locator, position);
}
public void setExtensionJs(String extensionJs)
{
selenium.setExtensionJs(extensionJs);
}
public void setMouseSpeed(String pixels)
{
selenium.setMouseSpeed(pixels);
}
public void setSpeed(String value)
{
selenium.setSpeed(value);
}
public void setTimeout(String timeout)
{
selenium.setTimeout(timeout);
}
public void shiftKeyDown()
{
selenium.shiftKeyDown();
}
public void shiftKeyUp()
{
selenium.shiftKeyUp();
}
public void showContextualBanner()
{
selenium.showContextualBanner();
}
public void showContextualBanner(String className, String methodName)
{
selenium.showContextualBanner(className, methodName);
}
public void shutDownSeleniumServer()
{
selenium.shutDownSeleniumServer();
}
public void start()
{
selenium.start();
}
public void start(Object optionsObject)
{
selenium.start(optionsObject);
}
public void start(String optionsString)
{
selenium.start(optionsString);
}
public void stop()
{
selenium.stop();
}
public void submit(String formLocator)
{
selenium.submit(formLocator);
}
public void type(String locator, String value)
{
selenium.type(locator, value);
}
public void typeKeys(String locator, String value)
{
selenium.typeKeys(locator, value);
}
public void uncheck(String locator)
{
selenium.uncheck(locator);
}
public void useXpathLibrary(String libraryName)
{
selenium.useXpathLibrary(libraryName);
}
public void waitForCondition(String script, String timeout)
{
selenium.waitForCondition(script, timeout);
}
public void waitForFrameToLoad(String frameAddress, String timeout)
{
selenium.waitForFrameToLoad(frameAddress, timeout);
}
public void waitForPageToLoad(String timeout)
{
selenium.waitForPageToLoad(timeout);
}
public void waitForPopUp(String windowID, String timeout)
{
selenium.waitForPopUp(windowID, timeout);
}
public void windowFocus()
{
selenium.windowFocus();
}
public void windowMaximize()
{
selenium.windowMaximize();
}
// ---------------------------------------------------------------------
// End of delegate methods
// ---------------------------------------------------------------------
// Captures an error report and fails the test; call from code paths the test
// should never reach.
protected final void unreachable()
{
writeErrorReport();
throw new AssertionError("This statement should not be reachable.");
}
/**
* Open the {@linkplain #getBaseURL()}, and waits for the page to load.
*/
/**
 * Opens the {@linkplain #getBaseURL() base URL} and waits for the page to load.
 */
protected final void openBaseURL()
{
open(baseURL);
waitForPageToLoad();
}
/**
* Asserts the text of an element, identified by the locator.
*
* @param locator identifies the element whose text value is to be asserted
* @param expected expected value for the element's text
*/
/**
 * Asserts the text of an element, identified by the locator. On any failure
 * (including a Selenium error while locating the element) an error report is
 * captured via {@link #writeErrorReport()}.
 *
 * @param locator  identifies the element whose text value is to be asserted
 * @param expected expected value for the element's text
 */
protected final void assertText(String locator, String expected)
{
    String actual = null;
    try
    {
        actual = getText(locator);
    } catch (RuntimeException ex)
    {
        System.err.printf("Error accessing %s: %s, in:\n\n%s\n\n", locator, ex.getMessage(), getHtmlSource());
        // Capture the page for later analysis as well, consistent with
        // assertAttribute() and the other assert helpers in this class.
        writeErrorReport();
        throw ex;
    }
    if (actual.equals(expected))
        return;
    writeErrorReport();
    throw new AssertionError(String.format("%s was '%s' not '%s'", locator, actual, expected));
}
/**
 * Asserts that each of the given strings is present in the visible text of the
 * current page, capturing an error report before failing on the first miss.
 */
protected final void assertTextPresent(String... text)
{
    for (String item : text)
    {
        if (!isTextPresent(item))
        {
            writeErrorReport();
            throw new AssertionError("Page did not contain '" + item + "'.");
        }
    }
}
/**
* Assets that each string provided is present somewhere in the current document.
*
* @param expected string expected to be present
*/
/**
 * Asserts that each string provided is present somewhere in the current document's
 * markup, capturing an error report before failing on the first miss.
 *
 * @param expected strings expected to be present
 */
protected final void assertSourcePresent(String... expected)
{
    String markup = getHtmlSource();
    for (String snippet : expected)
    {
        if (!markup.contains(snippet))
        {
            writeErrorReport();
            throw new AssertionError("Page did not contain source '" + snippet + "'.");
        }
    }
}
/**
* Click a link identified by a locator, then wait for the resulting page to load.
* This is not useful for Ajax updates, just normal full-page refreshes.
*
* @param locator identifies the link to click
*/
/**
 * Clicks a link identified by a locator, then waits for the resulting page to load.
 * This is not useful for Ajax updates, just normal full-page refreshes.
 *
 * @param locator identifies the link to click
 */
protected final void clickAndWait(String locator)
{
click(locator);
waitForPageToLoad();
}
/**
* Waits for the page to load (up to 15 seconds). This is invoked after clicking on an element
* that forces a full page refresh.
*/
/**
 * Waits for the page to load (up to 15 seconds, {@link #PAGE_LOAD_TIMEOUT}). This is
 * invoked after clicking on an element that forces a full page refresh.
 */
protected final void waitForPageToLoad()
{
waitForPageToLoad(PAGE_LOAD_TIMEOUT);
}
/**
* Used when the locator identifies an attribute, not an element.
*
* @param locator identifies the attribute whose value is to be asserted
* @param expected expected value for the attribute
*/
/**
 * Used when the locator identifies an attribute, not an element. On any failure an error
 * report is written before the exception propagates.
 *
 * @param locator identifies the attribute whose value is to be asserted
 * @param expected expected value for the attribute
 */
protected final void assertAttribute(String locator, String expected)
{
    String actual;
    try
    {
        actual = getAttribute(locator);
    } catch (RuntimeException ex)
    {
        System.err.printf("Error accessing %s: %s", locator, ex.getMessage());
        writeErrorReport();
        throw ex;
    }
    if (!actual.equals(expected))
    {
        writeErrorReport();
        throw new AssertionError(String.format("%s was '%s' not '%s'", locator, actual, expected));
    }
}
/**
* Assets that the value in the field matches the expectation
*
* @param locator identifies the field
* @param expected expected value for the field
* @since 5.3
*/
/**
 * Asserts that the value in the field matches the expectation; writes an error report
 * before rethrowing on mismatch.
 *
 * @param locator identifies the field
 * @param expected expected value for the field
 * @since 5.3
 */
protected final void assertFieldValue(String locator, String expected)
{
    try
    {
        assertEquals(getValue(locator), expected);
    } catch (AssertionError ex)
    {
        writeErrorReport();
        throw ex;
    }
}
/**
* Opens the base URL, then clicks through a series of links to get to a desired application
* state.
*
* @since 5.3
*/
/**
 * Opens the base URL, then clicks through a series of links to get to a desired application
 * state.
 *
 * @param linkText text of each link to click, in order
 * @since 5.3
 */
protected final void openLinks(String... linkText)
{
    openBaseURL();
    for (String text : linkText)
    {
        clickAndWait("link=" + text);
    }
}
/**
* Sleeps for the indicated number of seconds.
*
* @since 5.3
*/
/**
 * Sleeps for the indicated number of milliseconds.
 *
 * @param millis number of milliseconds to sleep
 * @since 5.3
 */
protected final void sleep(long millis)
{
    try
    {
        Thread.sleep(millis);
    } catch (InterruptedException ex)
    {
        // Restore the interrupt status rather than swallowing it, so callers
        // can still observe that the thread was interrupted.
        Thread.currentThread().interrupt();
    }
}
/**
* Waits, up to the page load limit for an element (identified by a CSS rule) to exist
* (it is not assured that the element will be visible).
*
* @param cssRule used to locate the element
* @since 5.3
*/
/**
 * Waits, up to the page load limit for an element (identified by a CSS rule) to exist
 * (it is not assured that the element will be visible).
 *
 * @param cssRule used to locate the element
 * @since 5.3
 */
protected void waitForCSSSelectedElementToAppear(String cssRule)
{
    // NOTE(review): relies on the page exposing Prototype's $$() selector function -- confirm.
    String condition = String.format("window.$$(\"%s\").size() > 0", cssRule);
    waitForCondition(condition, PAGE_LOAD_TIMEOUT);
}
/**
* Waits for the element with the given client-side id to be present in the DOM (
* does not assure that the element is visible).
*
* @param elementId identifies the element
* @since 5.3
*/
/**
 * Waits for the element with the given client-side id to be present in the DOM
 * (does not assure that the element is visible).
 *
 * @param elementId identifies the element
 * @since 5.3
 */
protected final void waitForElementToAppear(String elementId)
{
    // Condition is truthy once $(elementId) resolves to a non-null element.
    String condition = String.format("window.$(\"%s\")", elementId);
    waitForCondition(condition, PAGE_LOAD_TIMEOUT);
}
/**
* Waits for the element to be removed from the DOM.
*
* @param elementId client-side id of element
* @since 5.3
*/
/**
 * Waits for the element to be removed from the DOM.
 *
 * @param elementId client-side id of element
 * @since 5.3
 */
protected final void waitForElementToDisappear(String elementId)
{
    // NOTE(review): the condition invokes hide() on the element rather than testing for its
    // absence (e.g. "!window.$(\"id\")"); presumably the wait loop treats an evaluation error
    // (element gone, so $() is null) differently -- confirm this is the intended mechanism.
    String condition = String.format("window.$(\"%s\").hide()", elementId);
    waitForCondition(condition, PAGE_LOAD_TIMEOUT);
}
/**
* Waits for the element specified by the selector to become visible
* Note that waitForElementToAppear waits for the element to be present in the dom, visible or not. waitForVisible
* waits for an element that already exists in the dom to become visible.
*
* @param selector element selector
* @since 5.3
*/
/**
 * Waits for the element specified by the selector to become visible.
 * Note that waitForElementToAppear waits for the element to be present in the dom, visible or
 * not. waitForVisible waits for an element that already exists in the dom to become visible.
 *
 * @param selector element selector
 * @since 5.3
 */
protected final void waitForVisible(String selector)
{
    String condition = String.format("selenium.isVisible(\"%s\")", selector);
    waitForCondition(condition, PAGE_LOAD_TIMEOUT);
}
/**
* Waits for the element specified by the selector to become invisible
* Note that waitForElementToDisappear waits for the element to be absent from the dom, visible or not. waitForInvisible
* waits for an existing element to become invisible.
*
* @param selector element selector
* @since 5.3
*/
/**
 * Waits for the element specified by the selector to become invisible.
 * Note that waitForElementToDisappear waits for the element to be absent from the dom, visible
 * or not. waitForInvisible waits for an existing element to become invisible.
 *
 * @param selector element selector
 * @since 5.3
 */
protected final void waitForInvisible(String selector)
{
    String condition = String.format("!selenium.isVisible(\"%s\")", selector);
    waitForCondition(condition, PAGE_LOAD_TIMEOUT);
}
/**
* Asserts that the current page's title matches the expected value.
*
* @param expected value for title
* @since 5.3
*/
/**
 * Asserts that the current page's title matches the expected value; writes an error report
 * before rethrowing on mismatch.
 *
 * @param expected expected value for the page title
 * @since 5.3
 */
protected final void assertTitle(String expected)
{
    try
    {
        assertEquals(getTitle(), expected);
    } catch (AssertionError ex)
    {
        writeErrorReport();
        throw ex;
    }
}
/**
* Waits until all active XHR requests are completed.
*
* @param timeout timeout to wait for
* @since 5.3
*/
/**
 * Waits until all active XHR requests are completed.
 *
 * @param timeout timeout to wait for
 * @since 5.3
 */
protected final void waitForAjaxRequestsToComplete(String timeout)
{
    // NOTE(review): depends on the page exposing Prototype's Ajax.activeRequestCount -- confirm.
    waitForCondition("selenium.browserbot.getCurrentWindow().Ajax.activeRequestCount == 0", timeout);
}
/**
 * Returns the number of nodes that match the specified CSS selector, delegating directly
 * to the underlying Selenium instance.
 *
 * @param str the CSS selector
 * @return number of matching nodes
 */
public Number getCssCount(String str)
{
    return selenium.getCssCount(str);
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.compute.fluent;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.Response;
import com.azure.core.management.polling.PollResult;
import com.azure.core.util.Context;
import com.azure.core.util.polling.PollerFlux;
import com.azure.core.util.polling.SyncPoller;
import com.azure.resourcemanager.compute.fluent.models.GalleryApplicationVersionInner;
import com.azure.resourcemanager.compute.models.GalleryApplicationVersionUpdate;
import com.azure.resourcemanager.compute.models.ReplicationStatusTypes;
import java.nio.ByteBuffer;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
/** An instance of this class provides access to all the operations defined in GalleryApplicationVersionsClient. */
public interface GalleryApplicationVersionsClient {
/**
 * Create or update a gallery Application Version.
 *
 * <p>The response body is returned as a raw byte stream; see {@link #createOrUpdateAsync} for the typed variant.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
 * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version is
 *     to be created.
 * @param galleryApplicationVersionName The name of the gallery Application Version to be created. Needs to follow
 *     semantic version name pattern: The allowed characters are digit and period. Digits must be within the range
 *     of a 32-bit integer. Format: &lt;MajorVersion&gt;.&lt;MinorVersion&gt;.&lt;Patch&gt;.
 * @param galleryApplicationVersion Parameters supplied to the create or update gallery Application Version
 *     operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return specifies information about the gallery Application Version that you want to create or update along with
 *     {@link Response} on successful completion of {@link Mono}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Response<Flux<ByteBuffer>>> createOrUpdateWithResponseAsync(
    String resourceGroupName,
    String galleryName,
    String galleryApplicationName,
    String galleryApplicationVersionName,
    GalleryApplicationVersionInner galleryApplicationVersion);
/**
 * Create or update a gallery Application Version.
 *
 * <p>This is a long-running operation; the returned poller can be used to track its progress asynchronously.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
 * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version is
 *     to be created.
 * @param galleryApplicationVersionName The name of the gallery Application Version to be created. Needs to follow
 *     semantic version name pattern: The allowed characters are digit and period. Digits must be within the range
 *     of a 32-bit integer. Format: &lt;MajorVersion&gt;.&lt;MinorVersion&gt;.&lt;Patch&gt;.
 * @param galleryApplicationVersion Parameters supplied to the create or update gallery Application Version
 *     operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the {@link PollerFlux} for polling of specifies information about the gallery Application Version that
 *     you want to create or update.
 */
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
PollerFlux<PollResult<GalleryApplicationVersionInner>, GalleryApplicationVersionInner> beginCreateOrUpdateAsync(
    String resourceGroupName,
    String galleryName,
    String galleryApplicationName,
    String galleryApplicationVersionName,
    GalleryApplicationVersionInner galleryApplicationVersion);
/**
 * Create or update a gallery Application Version.
 *
 * <p>This is a long-running operation; the returned poller can be used to track its progress.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
 * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version is
 *     to be created.
 * @param galleryApplicationVersionName The name of the gallery Application Version to be created. Needs to follow
 *     semantic version name pattern: The allowed characters are digit and period. Digits must be within the range
 *     of a 32-bit integer. Format: &lt;MajorVersion&gt;.&lt;MinorVersion&gt;.&lt;Patch&gt;.
 * @param galleryApplicationVersion Parameters supplied to the create or update gallery Application Version
 *     operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the {@link SyncPoller} for polling of specifies information about the gallery Application Version that
 *     you want to create or update.
 */
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
SyncPoller<PollResult<GalleryApplicationVersionInner>, GalleryApplicationVersionInner> beginCreateOrUpdate(
    String resourceGroupName,
    String galleryName,
    String galleryApplicationName,
    String galleryApplicationVersionName,
    GalleryApplicationVersionInner galleryApplicationVersion);
/**
 * Create or update a gallery Application Version.
 *
 * <p>This is a long-running operation; the returned poller can be used to track its progress.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
 * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version is
 *     to be created.
 * @param galleryApplicationVersionName The name of the gallery Application Version to be created. Needs to follow
 *     semantic version name pattern: The allowed characters are digit and period. Digits must be within the range
 *     of a 32-bit integer. Format: &lt;MajorVersion&gt;.&lt;MinorVersion&gt;.&lt;Patch&gt;.
 * @param galleryApplicationVersion Parameters supplied to the create or update gallery Application Version
 *     operation.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the {@link SyncPoller} for polling of specifies information about the gallery Application Version that
 *     you want to create or update.
 */
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
SyncPoller<PollResult<GalleryApplicationVersionInner>, GalleryApplicationVersionInner> beginCreateOrUpdate(
    String resourceGroupName,
    String galleryName,
    String galleryApplicationName,
    String galleryApplicationVersionName,
    GalleryApplicationVersionInner galleryApplicationVersion,
    Context context);
/**
 * Create or update a gallery Application Version.
 *
 * <p>The returned {@link Mono} completes once the long-running operation has finished.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
 * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version is
 *     to be created.
 * @param galleryApplicationVersionName The name of the gallery Application Version to be created. Needs to follow
 *     semantic version name pattern: The allowed characters are digit and period. Digits must be within the range
 *     of a 32-bit integer. Format: &lt;MajorVersion&gt;.&lt;MinorVersion&gt;.&lt;Patch&gt;.
 * @param galleryApplicationVersion Parameters supplied to the create or update gallery Application Version
 *     operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return specifies information about the gallery Application Version that you want to create or update on
 *     successful completion of {@link Mono}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<GalleryApplicationVersionInner> createOrUpdateAsync(
    String resourceGroupName,
    String galleryName,
    String galleryApplicationName,
    String galleryApplicationVersionName,
    GalleryApplicationVersionInner galleryApplicationVersion);
/**
 * Create or update a gallery Application Version.
 *
 * <p>Blocks until the long-running operation completes.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
 * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version is
 *     to be created.
 * @param galleryApplicationVersionName The name of the gallery Application Version to be created. Needs to follow
 *     semantic version name pattern: The allowed characters are digit and period. Digits must be within the range
 *     of a 32-bit integer. Format: &lt;MajorVersion&gt;.&lt;MinorVersion&gt;.&lt;Patch&gt;.
 * @param galleryApplicationVersion Parameters supplied to the create or update gallery Application Version
 *     operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return specifies information about the gallery Application Version that you want to create or update.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
GalleryApplicationVersionInner createOrUpdate(
    String resourceGroupName,
    String galleryName,
    String galleryApplicationName,
    String galleryApplicationVersionName,
    GalleryApplicationVersionInner galleryApplicationVersion);
/**
 * Create or update a gallery Application Version.
 *
 * <p>Blocks until the long-running operation completes.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
 * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version is
 *     to be created.
 * @param galleryApplicationVersionName The name of the gallery Application Version to be created. Needs to follow
 *     semantic version name pattern: The allowed characters are digit and period. Digits must be within the range
 *     of a 32-bit integer. Format: &lt;MajorVersion&gt;.&lt;MinorVersion&gt;.&lt;Patch&gt;.
 * @param galleryApplicationVersion Parameters supplied to the create or update gallery Application Version
 *     operation.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return specifies information about the gallery Application Version that you want to create or update.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
GalleryApplicationVersionInner createOrUpdate(
    String resourceGroupName,
    String galleryName,
    String galleryApplicationName,
    String galleryApplicationVersionName,
    GalleryApplicationVersionInner galleryApplicationVersion,
    Context context);
/**
 * Update a gallery Application Version.
 *
 * <p>The response body is returned as a raw byte stream; see {@link #updateAsync} for the typed variant.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
 * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version is
 *     to be updated.
 * @param galleryApplicationVersionName The name of the gallery Application Version to be updated. Needs to follow
 *     semantic version name pattern: The allowed characters are digit and period. Digits must be within the range
 *     of a 32-bit integer. Format: &lt;MajorVersion&gt;.&lt;MinorVersion&gt;.&lt;Patch&gt;.
 * @param galleryApplicationVersion Parameters supplied to the update gallery Application Version operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return specifies information about the gallery Application Version that you want to update along with
 *     {@link Response} on successful completion of {@link Mono}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Response<Flux<ByteBuffer>>> updateWithResponseAsync(
    String resourceGroupName,
    String galleryName,
    String galleryApplicationName,
    String galleryApplicationVersionName,
    GalleryApplicationVersionUpdate galleryApplicationVersion);
/**
 * Update a gallery Application Version.
 *
 * <p>This is a long-running operation; the returned poller can be used to track its progress asynchronously.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
 * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version is
 *     to be updated.
 * @param galleryApplicationVersionName The name of the gallery Application Version to be updated. Needs to follow
 *     semantic version name pattern: The allowed characters are digit and period. Digits must be within the range
 *     of a 32-bit integer. Format: &lt;MajorVersion&gt;.&lt;MinorVersion&gt;.&lt;Patch&gt;.
 * @param galleryApplicationVersion Parameters supplied to the update gallery Application Version operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the {@link PollerFlux} for polling of specifies information about the gallery Application Version that
 *     you want to update.
 */
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
PollerFlux<PollResult<GalleryApplicationVersionInner>, GalleryApplicationVersionInner> beginUpdateAsync(
    String resourceGroupName,
    String galleryName,
    String galleryApplicationName,
    String galleryApplicationVersionName,
    GalleryApplicationVersionUpdate galleryApplicationVersion);
/**
 * Update a gallery Application Version.
 *
 * <p>This is a long-running operation; the returned poller can be used to track its progress.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
 * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version is
 *     to be updated.
 * @param galleryApplicationVersionName The name of the gallery Application Version to be updated. Needs to follow
 *     semantic version name pattern: The allowed characters are digit and period. Digits must be within the range
 *     of a 32-bit integer. Format: &lt;MajorVersion&gt;.&lt;MinorVersion&gt;.&lt;Patch&gt;.
 * @param galleryApplicationVersion Parameters supplied to the update gallery Application Version operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the {@link SyncPoller} for polling of specifies information about the gallery Application Version that
 *     you want to update.
 */
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
SyncPoller<PollResult<GalleryApplicationVersionInner>, GalleryApplicationVersionInner> beginUpdate(
    String resourceGroupName,
    String galleryName,
    String galleryApplicationName,
    String galleryApplicationVersionName,
    GalleryApplicationVersionUpdate galleryApplicationVersion);
/**
 * Update a gallery Application Version.
 *
 * <p>This is a long-running operation; the returned poller can be used to track its progress.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
 * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version is
 *     to be updated.
 * @param galleryApplicationVersionName The name of the gallery Application Version to be updated. Needs to follow
 *     semantic version name pattern: The allowed characters are digit and period. Digits must be within the range
 *     of a 32-bit integer. Format: &lt;MajorVersion&gt;.&lt;MinorVersion&gt;.&lt;Patch&gt;.
 * @param galleryApplicationVersion Parameters supplied to the update gallery Application Version operation.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the {@link SyncPoller} for polling of specifies information about the gallery Application Version that
 *     you want to update.
 */
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
SyncPoller<PollResult<GalleryApplicationVersionInner>, GalleryApplicationVersionInner> beginUpdate(
    String resourceGroupName,
    String galleryName,
    String galleryApplicationName,
    String galleryApplicationVersionName,
    GalleryApplicationVersionUpdate galleryApplicationVersion,
    Context context);
/**
 * Update a gallery Application Version.
 *
 * <p>The returned {@link Mono} completes once the long-running operation has finished.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
 * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version is
 *     to be updated.
 * @param galleryApplicationVersionName The name of the gallery Application Version to be updated. Needs to follow
 *     semantic version name pattern: The allowed characters are digit and period. Digits must be within the range
 *     of a 32-bit integer. Format: &lt;MajorVersion&gt;.&lt;MinorVersion&gt;.&lt;Patch&gt;.
 * @param galleryApplicationVersion Parameters supplied to the update gallery Application Version operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return specifies information about the gallery Application Version that you want to update on
 *     successful completion of {@link Mono}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<GalleryApplicationVersionInner> updateAsync(
    String resourceGroupName,
    String galleryName,
    String galleryApplicationName,
    String galleryApplicationVersionName,
    GalleryApplicationVersionUpdate galleryApplicationVersion);
/**
 * Update a gallery Application Version.
 *
 * <p>Blocks until the long-running operation completes.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
 * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version is
 *     to be updated.
 * @param galleryApplicationVersionName The name of the gallery Application Version to be updated. Needs to follow
 *     semantic version name pattern: The allowed characters are digit and period. Digits must be within the range
 *     of a 32-bit integer. Format: &lt;MajorVersion&gt;.&lt;MinorVersion&gt;.&lt;Patch&gt;.
 * @param galleryApplicationVersion Parameters supplied to the update gallery Application Version operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return specifies information about the gallery Application Version that you want to update.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
GalleryApplicationVersionInner update(
    String resourceGroupName,
    String galleryName,
    String galleryApplicationName,
    String galleryApplicationVersionName,
    GalleryApplicationVersionUpdate galleryApplicationVersion);
/**
 * Update a gallery Application Version.
 *
 * <p>Blocks until the long-running operation completes.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
 * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version is
 *     to be updated.
 * @param galleryApplicationVersionName The name of the gallery Application Version to be updated. Needs to follow
 *     semantic version name pattern: The allowed characters are digit and period. Digits must be within the range
 *     of a 32-bit integer. Format: &lt;MajorVersion&gt;.&lt;MinorVersion&gt;.&lt;Patch&gt;.
 * @param galleryApplicationVersion Parameters supplied to the update gallery Application Version operation.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return specifies information about the gallery Application Version that you want to update.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
GalleryApplicationVersionInner update(
    String resourceGroupName,
    String galleryName,
    String galleryApplicationName,
    String galleryApplicationVersionName,
    GalleryApplicationVersionUpdate galleryApplicationVersion,
    Context context);
/**
 * Retrieves information about a gallery Application Version.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
 * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version
 *     resides.
 * @param galleryApplicationVersionName The name of the gallery Application Version to be retrieved.
 * @param expand The expand expression to apply on the operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the retrieved gallery Application Version along with
 *     {@link Response} on successful completion of {@link Mono}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Response<GalleryApplicationVersionInner>> getWithResponseAsync(
    String resourceGroupName,
    String galleryName,
    String galleryApplicationName,
    String galleryApplicationVersionName,
    ReplicationStatusTypes expand);
/**
 * Retrieves information about a gallery Application Version.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
 * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version
 *     resides.
 * @param galleryApplicationVersionName The name of the gallery Application Version to be retrieved.
 * @param expand The expand expression to apply on the operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the retrieved gallery Application Version on
 *     successful completion of {@link Mono}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<GalleryApplicationVersionInner> getAsync(
    String resourceGroupName,
    String galleryName,
    String galleryApplicationName,
    String galleryApplicationVersionName,
    ReplicationStatusTypes expand);
/**
 * Retrieves information about a gallery Application Version.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
 * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version
 *     resides.
 * @param galleryApplicationVersionName The name of the gallery Application Version to be retrieved.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the retrieved gallery Application Version on
 *     successful completion of {@link Mono}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<GalleryApplicationVersionInner> getAsync(
    String resourceGroupName,
    String galleryName,
    String galleryApplicationName,
    String galleryApplicationVersionName);
/**
 * Retrieves information about a gallery Application Version.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
 * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version
 *     resides.
 * @param galleryApplicationVersionName The name of the gallery Application Version to be retrieved.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the retrieved gallery Application Version.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
GalleryApplicationVersionInner get(
    String resourceGroupName,
    String galleryName,
    String galleryApplicationName,
    String galleryApplicationVersionName);
/**
 * Retrieves information about a gallery Application Version.
 *
 * @param resourceGroupName The name of the resource group.
 * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
 * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version
 *     resides.
 * @param galleryApplicationVersionName The name of the gallery Application Version to be retrieved.
 * @param expand The expand expression to apply on the operation.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the retrieved gallery Application Version along with
 *     {@link Response}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
Response<GalleryApplicationVersionInner> getWithResponse(
    String resourceGroupName,
    String galleryName,
    String galleryApplicationName,
    String galleryApplicationVersionName,
    ReplicationStatusTypes expand,
    Context context);
    /**
     * Delete a gallery Application Version.
     *
     * @param resourceGroupName The name of the resource group.
     * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
     * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version
     *     resides.
     * @param galleryApplicationVersionName The name of the gallery Application Version to be deleted.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by the
     *     server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the raw streaming {@link Response} on successful completion of the returned {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Response<Flux<ByteBuffer>>> deleteWithResponseAsync(
        String resourceGroupName,
        String galleryName,
        String galleryApplicationName,
        String galleryApplicationVersionName);
    /**
     * Delete a gallery Application Version.
     *
     * @param resourceGroupName The name of the resource group.
     * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
     * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version
     *     resides.
     * @param galleryApplicationVersionName The name of the gallery Application Version to be deleted.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by the
     *     server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link PollerFlux} for polling of the long-running delete operation.
     */
    @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
    PollerFlux<PollResult<Void>, Void> beginDeleteAsync(
        String resourceGroupName,
        String galleryName,
        String galleryApplicationName,
        String galleryApplicationVersionName);
    /**
     * Delete a gallery Application Version.
     *
     * @param resourceGroupName The name of the resource group.
     * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
     * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version
     *     resides.
     * @param galleryApplicationVersionName The name of the gallery Application Version to be deleted.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by the
     *     server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling of the long-running delete operation.
     */
    @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
    SyncPoller<PollResult<Void>, Void> beginDelete(
        String resourceGroupName,
        String galleryName,
        String galleryApplicationName,
        String galleryApplicationVersionName);
    /**
     * Delete a gallery Application Version.
     *
     * @param resourceGroupName The name of the resource group.
     * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
     * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version
     *     resides.
     * @param galleryApplicationVersionName The name of the gallery Application Version to be deleted.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by the
     *     server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling of the long-running delete operation.
     */
    @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
    SyncPoller<PollResult<Void>, Void> beginDelete(
        String resourceGroupName,
        String galleryName,
        String galleryApplicationName,
        String galleryApplicationVersionName,
        Context context);
    /**
     * Delete a gallery Application Version.
     *
     * @param resourceGroupName The name of the resource group.
     * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
     * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version
     *     resides.
     * @param galleryApplicationVersionName The name of the gallery Application Version to be deleted.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by the
     *     server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return an empty {@link Mono} that completes when a successful response is received.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Void> deleteAsync(
        String resourceGroupName,
        String galleryName,
        String galleryApplicationName,
        String galleryApplicationVersionName);
    /**
     * Delete a gallery Application Version.
     *
     * @param resourceGroupName The name of the resource group.
     * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
     * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version
     *     resides.
     * @param galleryApplicationVersionName The name of the gallery Application Version to be deleted.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by the
     *     server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    void delete(
        String resourceGroupName,
        String galleryName,
        String galleryApplicationName,
        String galleryApplicationVersionName);
    /**
     * Delete a gallery Application Version.
     *
     * @param resourceGroupName The name of the resource group.
     * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
     * @param galleryApplicationName The name of the gallery Application Definition in which the Application Version
     *     resides.
     * @param galleryApplicationVersionName The name of the gallery Application Version to be deleted.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by the
     *     server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    void delete(
        String resourceGroupName,
        String galleryName,
        String galleryApplicationName,
        String galleryApplicationVersionName,
        Context context);
    /**
     * List gallery Application Versions in a gallery Application Definition.
     *
     * @param resourceGroupName The name of the resource group.
     * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
     * @param galleryApplicationName The name of the Shared Application Gallery Application Definition from which the
     *     Application Versions are to be listed.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by the
     *     server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the List Gallery Application version operation response as paginated response with {@link PagedFlux}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedFlux<GalleryApplicationVersionInner> listByGalleryApplicationAsync(
        String resourceGroupName, String galleryName, String galleryApplicationName);
    /**
     * List gallery Application Versions in a gallery Application Definition.
     *
     * @param resourceGroupName The name of the resource group.
     * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
     * @param galleryApplicationName The name of the Shared Application Gallery Application Definition from which the
     *     Application Versions are to be listed.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by the
     *     server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the List Gallery Application version operation response as paginated response with {@link PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<GalleryApplicationVersionInner> listByGalleryApplication(
        String resourceGroupName, String galleryName, String galleryApplicationName);
    /**
     * List gallery Application Versions in a gallery Application Definition.
     *
     * @param resourceGroupName The name of the resource group.
     * @param galleryName The name of the Shared Application Gallery in which the Application Definition resides.
     * @param galleryApplicationName The name of the Shared Application Gallery Application Definition from which the
     *     Application Versions are to be listed.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.resourcemanager.compute.models.ApiErrorException thrown if the request is rejected by the
     *     server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the List Gallery Application version operation response as paginated response with {@link PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<GalleryApplicationVersionInner> listByGalleryApplication(
        String resourceGroupName, String galleryName, String galleryApplicationName, Context context);
}
| |
/**
* Copyright (C) 2009-2014 Dell, Inc.
* See annotations for authorship information
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.dasein.cloud.compute;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
* <p>
* An operating system associated with servers and images.
* </p>
* @author George Reese @ enstratius (http://www.enstratius.com)
*/
public enum Platform {
    /** Generic UNIX */
    UNIX,
    /** Ubuntu */
    UBUNTU,
    /** Debian */
    DEBIAN,
    /** Solaris */
    SOLARIS,
    /** Fedora Core */
    FEDORA_CORE,
    /** RHEL (Red Hat Enterprise Linux) */
    RHEL,
    /** FreeBSD */
    FREE_BSD,
    /** OpenBSD */
    OPEN_BSD,
    /** CentOS */
    CENT_OS,
    /** CoreOS */
    COREOS,
    /** Generic Windows */
    WINDOWS,
    /** SUSE */
    SUSE,
    /** SmartOS */
    SMARTOS,
    /** No clue */
    UNKNOWN;

    /**
     * Attempts to identify a platform from a free-form name (e.g. an image or
     * server description). Matching is substring-based on the lower-cased name;
     * the order of the checks is significant and must be preserved (e.g. "centos"
     * is tested before the generic "linux" fallback).
     *
     * @param name the free-form name to analyze, may be null
     * @return the best-guess platform, or {@link #UNKNOWN} when nothing matches
     */
    static public @Nonnull Platform guess(@Nullable String name) {
        if( name == null ) {
            return UNKNOWN;
        }
        name = name.toLowerCase();
        if( name.contains("centos") ) {
            return CENT_OS;
        }
        else if( name.contains("coreos") ) {
            return COREOS;
        }
        else if( name.contains("ubuntu") ) {
            return UBUNTU;
        }
        else if( name.contains("fedora") ) {
            return FEDORA_CORE;
        }
        // NOTE(review): the bare "win" substring also matches names such as
        // "darwin"; retained for backward compatibility with existing callers
        else if( name.contains("windows") || name.contains("win") ) {
            return WINDOWS;
        }
        else if( name.contains("red hat") || name.contains("redhat") || name.contains("red-hat") || name.contains("rhel") ) {
            return RHEL;
        }
        else if( name.contains("debian") ) {
            return DEBIAN;
        }
        else if( name.contains("suse") || name.contains("sles") ) {
            return SUSE;
        }
        else if(name.contains("smartos")){
            return SMARTOS;
        }
        else if( name.contains("bsd") ) {
            if( name.contains("free") ) {
                return FREE_BSD;
            }
            else if( name.contains("open") ) {
                return OPEN_BSD;
            }
            else {
                // some other BSD flavor; fall back to generic UNIX
                return UNIX;
            }
        }
        else if( name.contains("solaris") ) {
            return SOLARIS;
        }
        else if( name.contains("linux") ) {
            // unrecognized Linux distribution
            return UNIX;
        }
        return UNKNOWN;
    }

    /**
     * Provides an appropriate device ID (e.g. sdh) for this platform given a device letter.
     * @param letter the letter to be mapped into a platform-specific device ID
     * @return the platform-specific device ID for the target letter
     */
    public String getDeviceId(String letter) {
        switch( this ) {
            case WINDOWS: return "xvd" + letter;
            default: return "sd" + letter;
        }
    }

    /**
     * Provides a device mapping (e.g. /dev/sdh) for the target device letter.
     * @param letter the letter to be mapped
     * @return the device mapping for the specified letter
     */
    public String getDeviceMapping(String letter) {
        switch( this ) {
            case WINDOWS: return "xvd" + letter;
            default: return "/dev/sd" + letter;
        }
    }

    /**
     * @return true if this platform is a BSD variant
     */
    public boolean isBsd() {
        return (equals(FREE_BSD) || equals(OPEN_BSD));
    }

    /**
     * @return true for every platform not explicitly excluded below
     */
    public boolean isLinux() {
        switch( this ) {
            // NOTE(review): SMARTOS (illumos-based) and generic UNIX fall through
            // to "true" here; retained as-is since callers may rely on it
            case SOLARIS: case FREE_BSD: case OPEN_BSD: case WINDOWS: case UNKNOWN: return false;
            default: return true;
        }
    }

    /**
     * @return true if this platform is an open-source operating system
     */
    public boolean isOpen() {
        return (isLinux() || isBsd() || equals(SOLARIS));
    }

    /**
     * @return true if this platform is UNIX-like (anything known that is not Windows)
     */
    public boolean isUnix() {
        return (!isWindows() && !equals(UNKNOWN));
    }

    /**
     * @return true if this platform is Windows
     */
    public boolean isWindows() {
        return equals(WINDOWS);
    }

    @Override
    public String toString() {
        switch( this ) {
            case UNIX: return "Generic Unix";
            case UBUNTU: return "Ubuntu";
            case DEBIAN: return "Debian";
            case SOLARIS: return "Solaris";
            case FEDORA_CORE: return "Fedora";
            case RHEL: return "Red Hat";
            case SUSE: return "SUSE";
            case FREE_BSD: return "FreeBSD";
            case OPEN_BSD: return "OpenBSD";
            case CENT_OS: return "CentOS";
            case COREOS: return "CoreOS";
            case WINDOWS: return "Windows";
            // fix: SMARTOS previously fell through to "Unknown"
            case SMARTOS: return "SmartOS";
        }
        return "Unknown";
    }
}
| |
package fr.laas.fape.planning.core.planning.reachability;
import fr.laas.fape.anml.model.concrete.VarRef;
import fr.laas.fape.constraints.bindings.Domain;
import fr.laas.fape.planning.Planning;
import fr.laas.fape.planning.core.planning.grounding.*;
import fr.laas.fape.planning.core.planning.planner.GlobalOptions;
import fr.laas.fape.planning.core.planning.planner.Planner;
import fr.laas.fape.planning.core.planning.preprocessing.Preprocessor;
import fr.laas.fape.planning.core.planning.states.PartialPlan;
import fr.laas.fape.planning.core.planning.timelines.Timeline;
import fr.laas.fape.planning.util.EffSet;
import fr.laas.fape.planning.core.planning.search.Handler;
import fr.laas.fape.structures.IRSet;
import fr.laas.fape.anml.model.LVarRef;
import fr.laas.fape.anml.model.abs.AbstractAction;
import fr.laas.fape.anml.model.concrete.Action;
import fr.laas.fape.anml.model.concrete.Task;
import fr.laas.fape.structures.IntRep;
import java.util.*;
import java.util.stream.Collectors;
/**
 * Planner handler that maintains reachability information for a partial plan:
 * it builds a relaxed reachability graph over ground actions/fluents, encodes
 * action instantiations and task supporters as n-ary CSP constraints, and
 * prunes variable domains (and detects dead ends) on every propagation pass.
 */
public class ReachabilityHandler extends Handler {
    // when enabled, open tasks get an additional n-ary constraint linking their
    // arguments to their method/ground supporter variables
    private final boolean USE_DECOMPOSITION_VARIABLES = GlobalOptions.getBooleanOption("use-decomposition-variables");
    /**
     * One-time initialization when a partial plan is bound to a planner:
     * builds the core reachability graph, records all ground-action IDs and
     * n-ary constraints in the CSP, replays insertion notifications for the
     * actions/tasks already in the plan, then propagates.
     */
    @Override
    public void stateBindedToPlanner(PartialPlan st, Planner pl) {
        assert !st.hasExtension(CoreReachabilityGraph.StateExt.class);
        // init the core of the dependency graph (shared, action-only part)
        CoreReachabilityGraph core = new CoreReachabilityGraph(pl.preprocessor.getRelaxedActions(), false, pl.preprocessor.store);
        st.addExtension(new CoreReachabilityGraph.StateExt(core));
        // Record ground action ids as possible values for variables in the CSP
        for (GAction ga : pl.preprocessor.getAllActions()) {
            st.csp.bindings().addPossibleValue(ga.id);
        }
        // record all n-ary constraints (action instantiations and task supporters);
        // each abstract action gets one, and each distinct task name gets one
        Set<String> recordedTask = new HashSet<>();
        for (AbstractAction aa : pl.pb.abstractActions()) {
            st.csp.bindings().recordEmptyNAryConstraint(aa.name(), true, aa.allVars().length + 1);
            st.csp.bindings().addPossibleValue(aa.name());
            if (!recordedTask.contains(aa.taskName())) {
                st.csp.bindings().recordEmptyNAryConstraint(aa.taskName(), true, aa.args().size() + 2);
                recordedTask.add(aa.taskName());
            }
        }
        // fill the n-ary constraints with one allowed tuple per ground action
        for (GAction ga : pl.preprocessor.getAllActions()) {
            // values for all variables of this action
            List<String> values = new LinkedList<>();
            for (LVarRef var : ga.variables)
                values.add(ga.valueOf(var).instance());
            // add possible tuple to instantiation constraints
            st.csp.bindings().addAllowedTupleToNAryConstraint(ga.abs.name(), values, ga.id);
            // values for arguments of this action
            List<String> argValues = new LinkedList<>();
            for (LVarRef var : ga.abs.args())
                argValues.add(ga.valueOf(var).instance());
            argValues.add(ga.abs.name());
            // add possible tuple to supporter constraints
            st.csp.bindings().addAllowedTupleToNAryConstraint(ga.abs.taskName(), argValues, ga.id);
        }
        // notify ourselves of the presence of any actions and tasks in the plan
        for (Action a : st.getAllActions())
            actionInserted(a, st, pl);
        for (Task t : st.getOpenTasks())
            taskInserted(t, st, pl);
        // trigger propagation of constraint networks
        st.checkConsistency();
        propagateNetwork(st, pl);
        st.checkConsistency();
    }
    /**
     * Re-propagates the reachability network when a state is selected for expansion.
     */
    @Override
    protected void apply(PartialPlan st, StateLifeTime time, Planner planner) {
        if (time == StateLifeTime.SELECTION) {
            propagateNetwork(st, planner);
        }
    }
    /**
     * Creates the "instantiation" variable of a newly inserted action: an int
     * variable whose domain is the set of IDs of its possible ground versions,
     * tied to the action's parameters through the abstract action's n-ary constraint.
     * Idempotent: returns immediately if the variable is already recorded.
     */
    @Override
    public void actionInserted(Action act, PartialPlan st, Planner pl) {
        if(st.csp.bindings().isRecorded(act.instantiationVar()))
            return;
        assert !st.csp.bindings().isRecorded(act.instantiationVar()) : "The action already has a variable for its ground versions.";
        // all ground versions of this actions (represented by their ID)
        LVarRef[] vars = act.abs().allVars();
        List<VarRef> values = new ArrayList<>();
        for(LVarRef v : vars) {
            values.add(act.context().getDefinition(v));
        }
        // Variable representing the ground versions of this action,
        // initially restricted to the currently addable actions
        st.csp.bindings().addIntVariable(act.instantiationVar(), new Domain(st.addableActions.toBitSet()));
        values.add(act.instantiationVar());
        st.addValuesSetConstraint(values, act.abs().name());
        assert st.csp.bindings().isRecorded(act.instantiationVar());
    }
    /**
     * Creates the supporter variables of a newly inserted task: one symbolic
     * variable over the names of the methods that can refine it and one int
     * variable over the ground actions that can support it.
     * Idempotent: returns immediately if already recorded.
     */
    @Override
    public void taskInserted(Task task, PartialPlan st, Planner planner) {
        if(st.csp.bindings().isRecorded(task.methodSupportersVar()))
            return;
        assert !st.csp.bindings().isRecorded(task.methodSupportersVar());
        assert !st.csp.bindings().isRecorded(task.groundSupportersVar());
        Collection<String> supportingMethods = planner.pb.getSupportersForTask(task.name()).stream()
                .map(aa -> aa.name()).collect(Collectors.toList());
        st.csp.bindings().addVariable(task.methodSupportersVar(), supportingMethods);
        st.csp.bindings().addIntVariable(task.groundSupportersVar());
        List<VarRef> variables = new LinkedList<>();
        variables.addAll(task.args());
        variables.add(task.methodSupportersVar());
        variables.add(task.groundSupportersVar());
        if(USE_DECOMPOSITION_VARIABLES)
            st.addValuesSetConstraint(variables, task.name());
    }
    /**
     * When an action is chosen to support a task, unify the task's ground
     * supporter with the action's instantiation variable.
     */
    @Override
    public void supportLinkAdded(Action act, Task task, PartialPlan st) {
        st.addUnificationConstraint(task.groundSupportersVar(), act.instantiationVar());
    }
    /**
     * Rebuilds the per-state reachability graph from the current partial plan
     * (fluents, open tasks), propagates it, and uses the result to:
     * restrict supporter/instantiation variable domains, record the set of
     * addable actions/templates, and mark the state as a dead end when an open
     * goal is provably unachievable.
     */
    private void propagateNetwork(PartialPlan st, Planner pl) {
        final IntRep<GAction> gactsRep = pl.preprocessor.store.getIntRep(GAction.class);
        CoreReachabilityGraph.StateExt ext = st.getExtension(CoreReachabilityGraph.StateExt.class);
        final Preprocessor pp = st.pl.preprocessor;
        final GroundProblem gpb = pp.getGroundProblem();
        List<TempFluent> tempFluents = new ArrayList<>();
        // gather all fluents appearing in the partial plan
        // those fluents can not be used to support changes
        gpb.tempsFluents(st).stream()
                .flatMap(tfs -> tfs.fluents.stream().map(f -> new TempFluent(
                        st.getEarliestStartTime(tfs.timepoints.iterator().next()),
                        TempFluent.DGFluent.getBasicFluent(f, pp.store))))
                .forEach(tempFluents::add);
        // gather all fluents achieved and not involved in any causal link
        // those can be used to support transitions
        gpb.tempsFluentsThatCanSupportATransition(st).stream()
                .flatMap(tfs -> tfs.fluents.stream().map(f -> new TempFluent(
                        st.getEarliestStartTime(tfs.timepoints.iterator().next()),
                        TempFluent.DGFluent.getFluentWithChange(f, pp.store))))
                .forEach(tempFluents::add);
        // one "task fluent" per (open task, possible ground supporter) pair
        Set<TempFluent> tasks = new HashSet<>();
        for(Task t : st.getOpenTasks()) {
            int est = st.getEarliestStartTime(t.start());
            for(GAction ga : new EffSet<>(pp.groundActionIntRepresentation(), st.csp.bindings().rawDomain(t.groundSupportersVar()).toBitSet())) {
                tasks.add(new TempFluent(est, TempFluent.DGFluent.from(ga.task, st.pb, pp.store)));
            }
        }
        // all facts (fluents and open tasks) in the current state
        List<TempFluent> allFacts = new ArrayList<>();
        allFacts.addAll(tempFluents);
        allFacts.addAll(tasks);
        // create new graph from the core graph (actions) and the facts
        PartialPlanReachabilityGraph graph = new PartialPlanReachabilityGraph(ext.getCoreGraph(), allFacts, pl);
        ext.currentGraph = graph;
        // incremental propagation relative to the previous graph of this state
        graph.propagate(ext.prevGraph);
        // on the first propagation ever, record the set of reachable fluents
        if(!pp.fluentsInitialized()) {
            IRSet<Fluent> fluents = new IRSet<>(pp.store.getIntRep(Fluent.class));
            for(Fluent f : graph.fluentsEAs.keys())
                fluents.add(f);
            pp.setPossibleFluents(fluents);
        }
        // unsupporting actions // TODO: shouldn't those be in the graph as well
        IRSet<GAction> unsupporting = new IRSet<>(gactsRep);
        for(Action a : st.getAllActions())
            if(!st.taskNet.isSupporting(a))
                unsupporting.addAll(st.getGroundActions(a));
        // for all open tasks, restrict their set of possible supporters to
        // (i) all non-supporting actions in the plan; and (ii) all actions that are addable
        IRSet<GAction> potentialTaskSupporters = unsupporting.clone();
        potentialTaskSupporters.addAll(graph.addableActs);
        Domain taskSupportersDom = new Domain(potentialTaskSupporters.toBitSet());
        for(Task t : st.getOpenTasks())
            st.csp.bindings().restrictDomain(t.groundSupportersVar(), taskSupportersDom);
        // all tasks that can be added to the plan
        Set<GTask> addableTasks = new HashSet<>(); // FIXME: use specialized implementation
        for(GAction ga : graph.addableActs) {
            addableTasks.addAll(ga.subTasks);
        }
        // all actions that might be attached: (i) those that are open;
        // and (ii) those that can be inserted
        IRSet<GAction> attachable = new IRSet<>(gactsRep);
        for(Task t : st.getOpenTasks()) {
            IRSet<GAction> supporters = new IRSet<>(gactsRep, st.csp.bindings().rawDomain(t.groundSupportersVar()).toBitSet());
            attachable.addAll(supporters);
        }
        for(GAction ga : graph.addableActs)
            if(addableTasks.contains(ga.task))
                attachable.add(ga);
        // task-dependent and unattached actions restricted to the set of attachable actions
        Domain unattachedDomain = new Domain(attachable.toBitSet());
        for(Action a : st.getUnmotivatedActions())
            st.csp.bindings().restrictDomain(a.instantiationVar(), unattachedDomain);
        // populate the addable actions information in the state. This info is used to filter out resolvers
        st.addableActions = new EffSet<>(pl.preprocessor.groundActionIntRepresentation());
        st.addableActions.addAll(graph.addableActs);
        st.addableTemplates = new HashSet<>();
        for(GAction ga : graph.addableActs)
            st.addableTemplates.add(ga.abs);
        int initialMakespan = Integer.MIN_VALUE;
        // declare state a dead end if an open goal is not feasible
        for(Timeline og : st.tdb.getConsumers()) {
            int latest = st.getLatestStartTime(og.getConsumeTimePoint());
            int earliest = st.getEarliestStartTime(og.getConsumeTimePoint());
            boolean doable = false;
            int optimisticEarliestTime = Integer.MAX_VALUE;
            // an open goal is doable if at least one of its disjunctive fluents
            // can appear before the goal's latest start time
            for(Fluent f : DisjunctiveFluent.fluentsOf(og.stateVariable, og.getGlobalConsumeValue(), st, pl)) {
                TempFluent.DGFluent dgf;
                if(og.hasSinglePersistence())
                    dgf = TempFluent.DGFluent.getBasicFluent(f, graph.core.store);
                else
                    dgf = TempFluent.DGFluent.getFluentWithChange(f, graph.core.store);
                final int ea = graph.earliestAppearances.containsKey(dgf) ? graph.earliestAppearances.get(dgf) : Integer.MAX_VALUE;
                if (ea <= latest)
                    doable = true;
                if(optimisticEarliestTime > ea)
                    optimisticEarliestTime = ea;
            }
            if(!doable)
                // at least one open goal is not achievable
                st.setDeadEnd();
            else if(earliest < optimisticEarliestTime)
                // push back in time: we had a too optimistic value
                st.enforceDelay(st.pb.start(), og.getConsumeTimePoint(), optimisticEarliestTime);
            initialMakespan = Math.max(initialMakespan, optimisticEarliestTime);
        }
        if(!Planning.quiet && isFirstPass && GlobalOptions.getBooleanOption("reachability-instrumentation")) {
            System.out.println("Initial Makespan: "+initialMakespan);
        }
        st.checkConsistency();
        isFirstPass = false;
    }
    // true until the first call to propagateNetwork completes (instrumentation only)
    private boolean isFirstPass = true;
}
| |
/*
* Copyright 2011 gitblit.com.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gitblit.models;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.gitblit.Constants.AccessPermission;
import com.gitblit.Constants.AccessRestrictionType;
import com.gitblit.Constants.AccountType;
import com.gitblit.Constants.PermissionType;
import com.gitblit.Constants.RegistrantType;
import com.gitblit.utils.StringUtils;
/**
 * TeamModel is a serializable model class that represents a group of users and
 * a list of accessible repositories.
 *
 * @author James Moger
 *
 */
public class TeamModel implements Serializable, Comparable<TeamModel> {
    private static final long serialVersionUID = 1L;
    // field names are reflectively mapped in EditTeam page
    public String name;
    // grants ADMINISTRATOR-level access to all repositories (see getRepositoryPermission)
    public boolean canAdmin;
    // NOTE(review): presumably allows members to fork repositories -- not exercised in this excerpt
    public boolean canFork;
    // NOTE(review): presumably allows members to create repositories -- not exercised in this excerpt
    public boolean canCreate;
    public AccountType accountType;
    // usernames are stored lower-cased (see addUser/hasUser)
    public final Set<String> users = new HashSet<String>();
    // retained for backwards-compatibility with RPC clients
    @Deprecated
    public final Set<String> repositories = new HashSet<String>();
    // lower-cased repository name (or regex) -> granted permission; insertion-ordered
    public final Map<String, AccessPermission> permissions = new LinkedHashMap<String, AccessPermission>();
    public final Set<String> mailingLists = new HashSet<String>();
    public final List<String> preReceiveScripts = new ArrayList<String>();
    public final List<String> postReceiveScripts = new ArrayList<String>();
    /**
     * Creates a team with the given name and a LOCAL account type.
     *
     * @param name the team name
     */
    public TeamModel(String name) {
        this.name = name;
        this.accountType = AccountType.LOCAL;
    }
    /**
     * Returns a list of repository permissions for this team.
     * Entries whose key contains characters invalid for a repository name are
     * classified as regex permissions.
     *
     * @return the team's sorted list of explicit permissions; empty for admin
     *         teams, whose access is implicit rather than enumerated
     */
    public List<RegistrantAccessPermission> getRepositoryPermissions() {
        List<RegistrantAccessPermission> list = new ArrayList<RegistrantAccessPermission>();
        if (canAdmin) {
            // team has REWIND access to all repositories,
            // so no explicit permission entries are reported
            return list;
        }
        for (Map.Entry<String, AccessPermission> entry : permissions.entrySet()) {
            String registrant = entry.getKey();
            String source = null;
            boolean editable = true;
            PermissionType pType = PermissionType.EXPLICIT;
            if (StringUtils.findInvalidCharacter(registrant) != null) {
                // a regex will have at least 1 invalid character
                pType = PermissionType.REGEX;
                source = registrant;
            }
            list.add(new RegistrantAccessPermission(registrant, entry.getValue(), pType, RegistrantType.REPOSITORY, source, editable));
        }
        Collections.sort(list);
        return list;
    }
/**
* Returns true if the team has any type of specified access permission for
* this repository.
*
* @param name
* @return true if team has a specified access permission for the repository
*/
public boolean hasRepositoryPermission(String name) {
String repository = AccessPermission.repositoryFromRole(name).toLowerCase();
if (permissions.containsKey(repository)) {
// exact repository permission specified
return true;
} else {
// search for regex permission match
for (String key : permissions.keySet()) {
if (name.matches(key)) {
AccessPermission p = permissions.get(key);
if (p != null) {
return true;
}
}
}
}
return false;
}
/**
* Returns true if the team has an explicitly specified access permission for
* this repository.
*
* @param name
* @return if the team has an explicitly specified access permission
*/
public boolean hasExplicitRepositoryPermission(String name) {
String repository = AccessPermission.repositoryFromRole(name).toLowerCase();
return permissions.containsKey(repository);
}
/**
* Adds a repository permission to the team.
* <p>
* Role may be formatted as:
* <ul>
* <li> myrepo.git <i>(this is implicitly RW+)</i>
* <li> RW+:myrepo.git
* </ul>
* @param role
*/
public void addRepositoryPermission(String role) {
AccessPermission permission = AccessPermission.permissionFromRole(role);
String repository = AccessPermission.repositoryFromRole(role).toLowerCase();
repositories.add(repository);
permissions.put(repository, permission);
}
public void addRepositoryPermissions(Collection<String> roles) {
for (String role:roles) {
addRepositoryPermission(role);
}
}
public AccessPermission removeRepositoryPermission(String name) {
String repository = AccessPermission.repositoryFromRole(name).toLowerCase();
repositories.remove(repository);
return permissions.remove(repository);
}
public void setRepositoryPermission(String repository, AccessPermission permission) {
if (permission == null) {
// remove the permission
permissions.remove(repository.toLowerCase());
repositories.remove(repository.toLowerCase());
} else {
// set the new permission
permissions.put(repository.toLowerCase(), permission);
repositories.add(repository.toLowerCase());
}
}
    /**
     * Resolves the team's effective access permission for the given repository.
     * Resolution order: anonymous (unrestricted repo), admin team, exact entry,
     * case-insensitive regex entry, then implicit permissions derived from the
     * repository's access restriction. The result is always capped at REWIND,
     * or at CLONE for frozen/non-bare/mirror repositories.
     *
     * @param repository the repository to evaluate
     * @return the resolved access permission (never null)
     */
    public RegistrantAccessPermission getRepositoryPermission(RepositoryModel repository) {
        RegistrantAccessPermission ap = new RegistrantAccessPermission();
        ap.registrant = name;
        ap.registrantType = RegistrantType.TEAM;
        ap.permission = AccessPermission.NONE;
        ap.mutable = false;
        // determine maximum permission for the repository
        final AccessPermission maxPermission =
            (repository.isFrozen || !repository.isBare || repository.isMirror) ?
                AccessPermission.CLONE : AccessPermission.REWIND;
        if (AccessRestrictionType.NONE.equals(repository.accessRestriction)) {
            // anonymous rewind
            ap.permissionType = PermissionType.ANONYMOUS;
            if (AccessPermission.REWIND.atMost(maxPermission)) {
                ap.permission = AccessPermission.REWIND;
            } else {
                ap.permission = maxPermission;
            }
            return ap;
        }
        if (canAdmin) {
            // admin teams receive the maximum allowed permission
            ap.permissionType = PermissionType.ADMINISTRATOR;
            if (AccessPermission.REWIND.atMost(maxPermission)) {
                ap.permission = AccessPermission.REWIND;
            } else {
                ap.permission = maxPermission;
            }
            return ap;
        }
        if (permissions.containsKey(repository.name.toLowerCase())) {
            // exact repository permission specified
            AccessPermission p = permissions.get(repository.name.toLowerCase());
            if (p != null && repository.accessRestriction.isValidPermission(p)) {
                ap.permissionType = PermissionType.EXPLICIT;
                if (p.atMost(maxPermission)) {
                    ap.permission = p;
                } else {
                    ap.permission = maxPermission;
                }
                ap.mutable = true;
                return ap;
            }
        } else {
            // search for case-insensitive regex permission match
            // NOTE(review): regex entries are only consulted when no exact key
            // exists -- an exact entry with an invalid permission suppresses
            // the regex search; confirm this is intended
            for (String key : permissions.keySet()) {
                if (StringUtils.matchesIgnoreCase(repository.name, key)) {
                    AccessPermission p = permissions.get(key);
                    if (p != null && repository.accessRestriction.isValidPermission(p)) {
                        // take first match
                        ap.permissionType = PermissionType.REGEX;
                        if (p.atMost(maxPermission)) {
                            ap.permission = p;
                        } else {
                            ap.permission = maxPermission;
                        }
                        ap.source = key;
                        return ap;
                    }
                }
            }
        }
        // still no explicit or regex, check for implicit permissions
        if (AccessPermission.NONE == ap.permission) {
            switch (repository.accessRestriction) {
            case VIEW:
                // no implicit permissions possible
                break;
            case CLONE:
                // implied view permission
                ap.permission = AccessPermission.VIEW;
                ap.permissionType = PermissionType.ANONYMOUS;
                break;
            case PUSH:
                // implied clone permission
                ap.permission = AccessPermission.CLONE;
                ap.permissionType = PermissionType.ANONYMOUS;
                break;
            case NONE:
                // implied REWIND or CLONE
                ap.permission = maxPermission;
                ap.permissionType = PermissionType.ANONYMOUS;
                break;
            }
        }
        return ap;
    }
/**
 * Returns true if this team may perform an action requiring
 * {@code requirePermission} on the repository. Repositories whose access
 * restriction is below {@code ifRestriction} are open to everyone.
 */
protected boolean canAccess(RepositoryModel repository, AccessRestrictionType ifRestriction, AccessPermission requirePermission) {
    if (!repository.accessRestriction.atLeast(ifRestriction)) {
        // the repository is not restricted at this level: access is unrestricted
        return true;
    }
    final RegistrantAccessPermission ap = getRepositoryPermission(repository);
    return ap.permission.atLeast(requirePermission);
}
/** Returns true if this team may view (browse) the repository. */
public boolean canView(RepositoryModel repository) {
    return canAccess(repository, AccessRestrictionType.VIEW, AccessPermission.VIEW);
}
/** Returns true if this team may clone the repository. */
public boolean canClone(RepositoryModel repository) {
    return canAccess(repository, AccessRestrictionType.CLONE, AccessPermission.CLONE);
}
/** Returns true if this team may push to the repository; frozen repositories reject all pushes. */
public boolean canPush(RepositoryModel repository) {
    return !repository.isFrozen
            && canAccess(repository, AccessRestrictionType.PUSH, AccessPermission.PUSH);
}
/** Returns true if this team may create a ref (branch/tag) in the repository; frozen repositories reject all writes. */
public boolean canCreateRef(RepositoryModel repository) {
    return !repository.isFrozen
            && canAccess(repository, AccessRestrictionType.PUSH, AccessPermission.CREATE);
}
/** Returns true if this team may delete a ref in the repository; frozen repositories reject all writes. */
public boolean canDeleteRef(RepositoryModel repository) {
    return !repository.isFrozen
            && canAccess(repository, AccessRestrictionType.PUSH, AccessPermission.DELETE);
}
/** Returns true if this team may force-push (rewind) a ref in the repository; frozen repositories reject all writes. */
public boolean canRewindRef(RepositoryModel repository) {
    return !repository.isFrozen
            && canAccess(repository, AccessRestrictionType.PUSH, AccessPermission.REWIND);
}
/**
 * Returns true if the named user is a member of this team.
 * Membership is stored and compared in lowercase (case-insensitive).
 * NOTE(review): throws NullPointerException if {@code name} is null — confirm callers never pass null.
 */
public boolean hasUser(String name) {
    return users.contains(name.toLowerCase());
}
/** Adds a user to this team; usernames are normalized to lowercase. */
public void addUser(String name) {
    users.add(name.toLowerCase());
}
/** Adds all the given users to this team; usernames are normalized to lowercase. */
public void addUsers(Collection<String> names) {
    for (String username : names) {
        addUser(username);
    }
}
/** Removes a user from this team; usernames are matched in lowercase. */
public void removeUser(String name) {
    users.remove(name.toLowerCase());
}
/** Subscribes the given email addresses to this team's mailing lists; addresses are normalized to lowercase. */
public void addMailingLists(Collection<String> addresses) {
    for (String email : addresses) {
        mailingLists.add(email.toLowerCase());
    }
}
/** Returns true if this team's account is managed locally (not by an external directory). */
public boolean isLocalTeam() {
    return accountType.isLocal();
}
/** Returns the team name. */
@Override
public String toString() {
    return name;
}
/**
 * Orders teams by name using case-SENSITIVE String ordering.
 * NOTE(review): user/list membership elsewhere in this class is case-insensitive;
 * confirm case-sensitive sorting is intentional here.
 */
@Override
public int compareTo(TeamModel o) {
    return name.compareTo(o.name);
}
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.bytecodeAnalysis.asm;
import org.jetbrains.org.objectweb.asm.Type;
import org.jetbrains.org.objectweb.asm.tree.*;
import org.jetbrains.org.objectweb.asm.tree.analysis.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Extended version of {@link Analyzer}.
 * It handles frames <b>and</b> additional data: alongside the usual frame
 * fix-point iteration, a per-instruction {@code Data} value is produced by the
 * interpreter and merged at control-flow joins (see {@code mergeData}).
 *
 * @author lambdamix
 */
public class AnalyzerExt<V extends Value, Data, MyInterpreter extends Interpreter<V> & InterpreterExt<Data>> extends SubroutineFinder {
  // interpreter that both models values (Interpreter) and carries the extra per-instruction state (InterpreterExt)
  private final MyInterpreter interpreter;
  // additional analysis data, indexed by instruction; data[0] is seeded with the start data
  private final Data[] data;
  // frame (locals + operand stack) computed before each instruction; null = unreachable
  private Frame<V>[] frames;
  // worklist bookkeeping: queued[i] is true while instruction i is pending
  private boolean[] queued;
  // worklist of instruction indices, used as a stack with `top` entries
  private int[] queue;
  private int top;

  public AnalyzerExt(MyInterpreter interpreter, Data[] data, Data startData) {
    this.interpreter = interpreter;
    this.data = data;
    if (data.length > 0) {
      data[0] = startData;
    }
  }

  /** Returns the per-instruction data array computed by {@link #analyze}. */
  public Data[] getData() {
    return data;
  }

  /**
   * Runs the combined frame/data fix-point analysis over the given method.
   *
   * @param owner internal name of the class declaring the method
   * @param m the method to analyze
   * @return the frame computed before each instruction (empty for abstract/native methods)
   * @throws AnalyzerException if the bytecode cannot be analyzed
   */
  public Frame<V>[] analyze(String owner, MethodNode m) throws AnalyzerException {
    // abstract/native methods have no code to analyze
    if ((m.access & (ACC_ABSTRACT | ACC_NATIVE)) != 0) {
      frames = ASMUtils.newFrameArray(0);
      return frames;
    }
    // value pushed for the caught exception on handler entry
    // (assumes V is compatible with BasicValue — TODO confirm for custom interpreters)
    @SuppressWarnings("unchecked") V refV = (V)BasicValue.REFERENCE_VALUE;
    n = m.instructions.size();
    insns = m.instructions;
    handlers = ASMUtils.newListArray(n);
    frames = ASMUtils.newFrameArray(n);
    subroutines = new Subroutine[n];
    queued = new boolean[n];
    queue = new int[n];
    top = 0;
    // computes exception handlers for each instruction
    for (TryCatchBlockNode tcb : m.tryCatchBlocks) {
      int begin = insns.indexOf(tcb.start);
      int end = insns.indexOf(tcb.end);
      for (int j = begin; j < end; ++j) {
        List<TryCatchBlockNode> insnHandlers = handlers[j];
        if (insnHandlers == null) {
          insnHandlers = new ArrayList<>();
          handlers[j] = insnHandlers;
        }
        insnHandlers.add(tcb);
      }
    }
    // computes the subroutine for each instruction:
    Subroutine main = new Subroutine(null, m.maxLocals, null);
    List<AbstractInsnNode> subroutineCalls = new ArrayList<>();
    Map<LabelNode, Subroutine> subroutineHeads = new HashMap<>();
    findSubroutine(0, main, subroutineCalls);
    // discover JSR subroutines transitively; one Subroutine instance per target label
    while (!subroutineCalls.isEmpty()) {
      JumpInsnNode jsr = (JumpInsnNode)subroutineCalls.remove(0);
      Subroutine sub = subroutineHeads.get(jsr.label);
      if (sub == null) {
        sub = new Subroutine(jsr.label, m.maxLocals, jsr);
        subroutineHeads.put(jsr.label, sub);
        findSubroutine(insns.indexOf(jsr.label), sub, subroutineCalls);
      }
      else {
        sub.callers.add(jsr);
      }
    }
    // drop the synthetic "main" subroutine markers (start == null)
    for (int i = 0; i < n; ++i) {
      if (subroutines[i] != null && subroutines[i].start == null) {
        subroutines[i] = null;
      }
    }
    // initializes the data structures for the control flow analysis
    Frame<V> current = newFrame(m.maxLocals, m.maxStack);
    Frame<V> handler = newFrame(m.maxLocals, m.maxStack);
    current.setReturn(interpreter.newReturnTypeValue(Type.getReturnType(m.desc)));
    Type[] args = Type.getArgumentTypes(m.desc);
    int local = 0;
    boolean isInstanceMethod = (m.access & ACC_STATIC) == 0;
    if (isInstanceMethod) {
      // local 0 holds "this" for instance methods
      Type ctype = Type.getObjectType(owner);
      current.setLocal(local, interpreter.newParameterValue(true, local, ctype));
      local++;
    }
    for (Type arg : args) {
      current.setLocal(local, interpreter.newParameterValue(isInstanceMethod, local, arg));
      local++;
      if (arg.getSize() == 2) {
        // long/double arguments occupy two slots; the second is a filler value
        current.setLocal(local, interpreter.newEmptyValue(local));
        local++;
      }
    }
    while (local < m.maxLocals) {
      current.setLocal(local, interpreter.newEmptyValue(local));
      local++;
    }
    interpreter.init(data[0]);
    merge(0, current, null);
    // control flow analysis
    while (top > 0) {
      int insn = queue[--top];
      Frame<V> f = frames[insn];
      Subroutine subroutine = subroutines[insn];
      queued[insn] = false;
      AbstractInsnNode insnNode = null;
      try {
        insnNode = m.instructions.get(insn);
        int insnOpcode = insnNode.getOpcode();
        int insnType = insnNode.getType();
        if (insnType == AbstractInsnNode.LABEL
            || insnType == AbstractInsnNode.LINE
            || insnType == AbstractInsnNode.FRAME) {
          // pseudo-instructions: propagate state unchanged to the next instruction
          interpreter.init(data[insn]);
          merge(insn + 1, f, subroutine);
        }
        else {
          // delta
          interpreter.init(data[insn]);
          current.init(f).execute(insnNode, interpreter);
          subroutine = subroutine == null ? null : subroutine.copy();
          if (insnNode instanceof JumpInsnNode) {
            JumpInsnNode j = (JumpInsnNode)insnNode;
            if (insnOpcode != GOTO && insnOpcode != JSR) {
              // conditional jumps also fall through
              merge(insn + 1, current, subroutine);
            }
            int jump = insns.indexOf(j.label);
            if (insnOpcode == JSR) {
              merge(jump, current, new Subroutine(j.label,
                                                  m.maxLocals, j));
            }
            else {
              merge(jump, current, subroutine);
            }
          }
          else if (insnNode instanceof LookupSwitchInsnNode) {
            // successors: default target plus every case label
            LookupSwitchInsnNode lsi = (LookupSwitchInsnNode)insnNode;
            int jump = insns.indexOf(lsi.dflt);
            merge(jump, current, subroutine);
            for (LabelNode label : lsi.labels) {
              jump = insns.indexOf(label);
              merge(jump, current, subroutine);
            }
          }
          else if (insnNode instanceof TableSwitchInsnNode) {
            // successors: default target plus every case label
            TableSwitchInsnNode tsi = (TableSwitchInsnNode)insnNode;
            int jump = insns.indexOf(tsi.dflt);
            merge(jump, current, subroutine);
            for (LabelNode label : tsi.labels) {
              jump = insns.indexOf(label);
              merge(jump, current, subroutine);
            }
          }
          else if (insnOpcode == RET) {
            if (subroutine == null) {
              throw new AnalyzerException(insnNode,
                                          "RET instruction outside of a sub routine");
            }
            // RET returns to the instruction after each JSR that can reach this subroutine
            for (int i = 0; i < subroutine.callers.size(); ++i) {
              JumpInsnNode caller = subroutine.callers.get(i);
              int call = insns.indexOf(caller);
              if (frames[call] != null) {
                merge(call + 1, frames[call], current,
                      subroutines[call], subroutine.access);
              }
            }
          }
          else if (insnOpcode != ATHROW && (insnOpcode < IRETURN || insnOpcode > RETURN)) {
            // ordinary instruction (not a return/throw): falls through
            if (subroutine != null) {
              // track which locals the subroutine touches, for RET merging
              if (insnNode instanceof VarInsnNode) {
                int var = ((VarInsnNode)insnNode).var;
                subroutine.access[var] = true;
                if (insnOpcode == LLOAD || insnOpcode == DLOAD
                    || insnOpcode == LSTORE
                    || insnOpcode == DSTORE) {
                  // wide loads/stores also touch the second slot
                  subroutine.access[var + 1] = true;
                }
              }
              else if (insnNode instanceof IincInsnNode) {
                int var = ((IincInsnNode)insnNode).var;
                subroutine.access[var] = true;
              }
            }
            merge(insn + 1, current, subroutine);
          }
        }
        // propagate state into every exception handler protecting this instruction
        List<TryCatchBlockNode> insnHandlers = handlers[insn];
        if (insnHandlers != null) {
          for (TryCatchBlockNode tcb : insnHandlers) {
            int jump = insns.indexOf(tcb.handler);
            // handler entry: same locals, stack cleared to just the exception ref
            handler.init(f);
            handler.clearStack();
            handler.push(refV);
            merge(jump, handler, subroutine);
          }
        }
      }
      catch (AnalyzerException e) {
        throw new AnalyzerException(e.node, "Error at instruction " + insn + ": " + e.getMessage(), e);
      }
      catch (Exception e) {
        throw new AnalyzerException(insnNode, "Error at instruction " + insn + ": " + e.getMessage(), e);
      }
    }
    return frames;
  }

  /** Returns the frames computed by the last call to {@link #analyze}. */
  public Frame<V>[] getFrames() {
    return frames;
  }

  /** Frame factory hook; subclasses may return a specialized Frame. */
  protected Frame<V> newFrame(int nLocals, int nStack) {
    return new Frame<>(nLocals, nStack);
  }

  /** Copying frame factory hook; subclasses may return a specialized Frame. */
  protected Frame<V> newFrame(Frame<? extends V> src) {
    return new Frame<>(src);
  }

  // -------------------------------------------------------------------------

  /**
   * Merges the given frame/subroutine state into instruction {@code insn} and
   * re-queues the instruction if anything changed.
   */
  private void merge(int insn, Frame<V> frame, Subroutine subroutine) throws AnalyzerException {
    Frame<V> oldFrame = frames[insn];
    Subroutine oldSubroutine = subroutines[insn];
    boolean changes;
    if (oldFrame == null) {
      // first time this instruction is reached
      frames[insn] = newFrame(frame);
      changes = true;
    }
    else {
      changes = oldFrame.merge(frame, interpreter);
    }
    if (oldSubroutine == null) {
      if (subroutine != null) {
        subroutines[insn] = subroutine.copy();
        changes = true;
      }
    }
    else if (subroutine != null) {
      changes |= oldSubroutine.merge(subroutine);
    }
    if (changes && !queued[insn]) {
      queued[insn] = true;
      queue[top++] = insn;
    }
    // delta
    mergeData(insn, interpreter);
  }

  /**
   * RET-specific merge: combines the frame before the JSR with the frame after
   * the RET (restoring locals the subroutine did not touch, per {@code access})
   * and re-queues the instruction if anything changed.
   */
  private void merge(int insn, Frame<V> beforeJSR,
                     Frame<V> afterRET, Subroutine subroutineBeforeJSR,
                     boolean[] access) throws AnalyzerException {
    Frame<V> oldFrame = frames[insn];
    Subroutine oldSubroutine = subroutines[insn];
    boolean changes;
    afterRET.merge(beforeJSR, access);
    if (oldFrame == null) {
      frames[insn] = newFrame(afterRET);
      changes = true;
    }
    else {
      changes = oldFrame.merge(afterRET, interpreter);
    }
    if (oldSubroutine != null && subroutineBeforeJSR != null) {
      changes |= oldSubroutine.merge(subroutineBeforeJSR);
    }
    if (changes && !queued[insn]) {
      queued[insn] = true;
      queue[top++] = insn;
    }
    // delta
    mergeData(insn, interpreter);
  }

  /**
   * Merges the interpreter's "after" data into {@code data[insn]}; re-queues
   * the instruction when the merged data differs from the old data.
   * NOTE: change detection relies on {@code Data.equals}.
   */
  private void mergeData(int insn, MyInterpreter interpreter) {
    boolean changes = false;
    Data oldData = data[insn];
    Data newData = interpreter.getAfterData(insn);
    if (oldData == null) {
      data[insn] = newData;
      changes = true;
    }
    else if (newData != null) {
      Data mergedData = interpreter.merge(oldData, newData);
      data[insn] = mergedData;
      changes = !oldData.equals(mergedData);
    }
    if (changes && !queued[insn]) {
      queued[insn] = true;
      queue[top++] = insn;
    }
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.hamcrest.Matchers;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
/**
 * Tests for {@code BoolQueryBuilder}: random round-trip construction, JSON
 * parsing (including deprecated and malformed forms), Lucene query conversion,
 * minimum_should_match semantics, and query rewriting.
 */
public class BoolQueryBuilderTests extends AbstractQueryTestCase<BoolQueryBuilder> {

    /** Builds a random bool query with 0-3 clauses of each type and random flags. */
    @Override
    protected BoolQueryBuilder doCreateTestQueryBuilder() {
        BoolQueryBuilder query = new BoolQueryBuilder();
        if (randomBoolean()) {
            query.adjustPureNegative(randomBoolean());
        }
        if (randomBoolean()) {
            query.disableCoord(randomBoolean());
        }
        if (randomBoolean()) {
            query.minimumShouldMatch(randomMinimumShouldMatch());
        }
        int mustClauses = randomIntBetween(0, 3);
        for (int i = 0; i < mustClauses; i++) {
            query.must(RandomQueryBuilder.createQuery(random()));
        }
        int mustNotClauses = randomIntBetween(0, 3);
        for (int i = 0; i < mustNotClauses; i++) {
            query.mustNot(RandomQueryBuilder.createQuery(random()));
        }
        int shouldClauses = randomIntBetween(0, 3);
        for (int i = 0; i < shouldClauses; i++) {
            query.should(RandomQueryBuilder.createQuery(random()));
        }
        int filterClauses = randomIntBetween(0, 3);
        for (int i = 0; i < filterClauses; i++) {
            query.filter(RandomQueryBuilder.createQuery(random()));
        }
        return query;
    }

    /**
     * Checks the Lucene query produced by the builder: match_all when there are
     * no effective clauses, otherwise a BooleanQuery whose clauses mirror the
     * builder's (including the adjust_pure_negative match_all addition).
     */
    @Override
    protected void doAssertLuceneQuery(BoolQueryBuilder queryBuilder, Query query, SearchContext searchContext) throws IOException {
        if (!queryBuilder.hasClauses()) {
            assertThat(query, instanceOf(MatchAllDocsQuery.class));
        } else {
            QueryShardContext context = searchContext.getQueryShardContext();
            List<BooleanClause> clauses = new ArrayList<>();
            clauses.addAll(getBooleanClauses(queryBuilder.must(), BooleanClause.Occur.MUST, context));
            clauses.addAll(getBooleanClauses(queryBuilder.mustNot(), BooleanClause.Occur.MUST_NOT, context));
            clauses.addAll(getBooleanClauses(queryBuilder.should(), BooleanClause.Occur.SHOULD, context));
            clauses.addAll(getBooleanClauses(queryBuilder.filter(), BooleanClause.Occur.FILTER, context));
            if (clauses.isEmpty()) {
                // all inner queries converted to null: the bool collapses to match_all
                assertThat(query, instanceOf(MatchAllDocsQuery.class));
            } else {
                assertThat(query, instanceOf(BooleanQuery.class));
                BooleanQuery booleanQuery = (BooleanQuery) query;
                assertThat(booleanQuery.isCoordDisabled(), equalTo(queryBuilder.disableCoord()));
                if (queryBuilder.adjustPureNegative()) {
                    boolean isNegative = true;
                    for (BooleanClause clause : clauses) {
                        if (clause.isProhibited() == false) {
                            isNegative = false;
                            break;
                        }
                    }
                    if (isNegative) {
                        // a purely negative bool query gets a MUST match_all added
                        clauses.add(new BooleanClause(new MatchAllDocsQuery(), BooleanClause.Occur.MUST));
                    }
                }
                assertThat(booleanQuery.clauses().size(), equalTo(clauses.size()));
                Iterator<BooleanClause> clauseIterator = clauses.iterator();
                for (BooleanClause booleanClause : booleanQuery.clauses()) {
                    assertThat(booleanClause, instanceOf(clauseIterator.next().getClass()));
                }
            }
        }
    }

    /** Converts the given builders to Lucene clauses, dropping those that convert to null. */
    private static List<BooleanClause> getBooleanClauses(List<QueryBuilder> queryBuilders, BooleanClause.Occur occur, QueryShardContext context) throws IOException {
        List<BooleanClause> clauses = new ArrayList<>();
        for (QueryBuilder query : queryBuilders) {
            Query innerQuery = query.toQuery(context);
            if (innerQuery != null) {
                clauses.add(new BooleanClause(innerQuery, occur));
            }
        }
        return clauses;
    }

    /**
     * Produces an alternate (non-array) JSON form of a random bool query that
     * must parse to the same builder. At most one clause of each type is kept.
     */
    @Override
    protected Map<String, BoolQueryBuilder> getAlternateVersions() {
        Map<String, BoolQueryBuilder> alternateVersions = new HashMap<>();
        BoolQueryBuilder tempQueryBuilder = createTestQueryBuilder();
        BoolQueryBuilder expectedQuery = new BoolQueryBuilder();
        String contentString = "{\n" +
            " \"bool\" : {\n";
        if (tempQueryBuilder.must().size() > 0) {
            QueryBuilder must = tempQueryBuilder.must().get(0);
            contentString += "\"must\": " + must.toString() + ",";
            expectedQuery.must(must);
        }
        if (tempQueryBuilder.mustNot().size() > 0) {
            QueryBuilder mustNot = tempQueryBuilder.mustNot().get(0);
            // "mustNot" is an accepted alias for "must_not"
            contentString += (randomBoolean() ? "\"must_not\": " : "\"mustNot\": ") + mustNot.toString() + ",";
            expectedQuery.mustNot(mustNot);
        }
        if (tempQueryBuilder.should().size() > 0) {
            QueryBuilder should = tempQueryBuilder.should().get(0);
            contentString += "\"should\": " + should.toString() + ",";
            expectedQuery.should(should);
        }
        if (tempQueryBuilder.filter().size() > 0) {
            QueryBuilder filter = tempQueryBuilder.filter().get(0);
            contentString += "\"filter\": " + filter.toString() + ",";
            expectedQuery.filter(filter);
        }
        // strip the trailing comma (or trailing newline when no clause was added)
        contentString = contentString.substring(0, contentString.length() - 1);
        contentString += " } \n" + "}";
        alternateVersions.put(contentString, expectedQuery);
        return alternateVersions;
    }

    /** Null clauses must be rejected with IllegalArgumentException. */
    public void testIllegalArguments() {
        BoolQueryBuilder booleanQuery = new BoolQueryBuilder();
        expectThrows(IllegalArgumentException.class, () -> booleanQuery.must(null));
        expectThrows(IllegalArgumentException.class, () -> booleanQuery.mustNot(null));
        expectThrows(IllegalArgumentException.class, () -> booleanQuery.filter(null));
        expectThrows(IllegalArgumentException.class, () -> booleanQuery.should(null));
    }

    // https://github.com/elastic/elasticsearch/issues/7240
    public void testEmptyBooleanQuery() throws Exception {
        XContentBuilder contentBuilder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
        contentBuilder.startObject().startObject("bool").endObject().endObject();
        Query parsedQuery = parseQuery(createParser(contentBuilder)).toQuery(createShardContext());
        assertThat(parsedQuery, Matchers.instanceOf(MatchAllDocsQuery.class));
    }

    /** Checks the default minimum_should_match of plain queries vs. filter context. */
    public void testDefaultMinShouldMatch() throws Exception {
        // Queries have a minShouldMatch of 0
        BooleanQuery bq = (BooleanQuery) parseQuery(boolQuery().must(termQuery("foo", "bar"))).toQuery(createShardContext());
        assertEquals(0, bq.getMinimumNumberShouldMatch());
        bq = (BooleanQuery) parseQuery(boolQuery().should(termQuery("foo", "bar"))).toQuery(createShardContext());
        assertEquals(0, bq.getMinimumNumberShouldMatch());
        // Filters have a minShouldMatch of 0/1
        ConstantScoreQuery csq = (ConstantScoreQuery) parseQuery(constantScoreQuery(boolQuery().must(termQuery("foo", "bar")))).toQuery(createShardContext());
        bq = (BooleanQuery) csq.getQuery();
        assertEquals(0, bq.getMinimumNumberShouldMatch());
        csq = (ConstantScoreQuery) parseQuery(constantScoreQuery(boolQuery().should(termQuery("foo", "bar")))).toQuery(createShardContext());
        bq = (BooleanQuery) csq.getQuery();
        assertEquals(1, bq.getMinimumNumberShouldMatch());
    }

    public void testMinShouldMatchFilterWithoutShouldClauses() throws Exception {
        BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
        boolQueryBuilder.filter(new BoolQueryBuilder().must(new MatchAllQueryBuilder()));
        Query query = boolQueryBuilder.toQuery(createShardContext());
        assertThat(query, instanceOf(BooleanQuery.class));
        BooleanQuery booleanQuery = (BooleanQuery) query;
        assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(0));
        assertThat(booleanQuery.clauses().size(), equalTo(1));
        BooleanClause booleanClause = booleanQuery.clauses().get(0);
        assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.FILTER));
        assertThat(booleanClause.getQuery(), instanceOf(BooleanQuery.class));
        BooleanQuery innerBooleanQuery = (BooleanQuery) booleanClause.getQuery();
        //we didn't set minimum should match initially, there are no should clauses so it should be 0
        assertThat(innerBooleanQuery.getMinimumNumberShouldMatch(), equalTo(0));
        assertThat(innerBooleanQuery.clauses().size(), equalTo(1));
        BooleanClause innerBooleanClause = innerBooleanQuery.clauses().get(0);
        assertThat(innerBooleanClause.getOccur(), equalTo(BooleanClause.Occur.MUST));
        assertThat(innerBooleanClause.getQuery(), instanceOf(MatchAllDocsQuery.class));
    }

    public void testMinShouldMatchFilterWithShouldClauses() throws Exception {
        BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
        boolQueryBuilder.filter(new BoolQueryBuilder().must(new MatchAllQueryBuilder()).should(new MatchAllQueryBuilder()));
        Query query = boolQueryBuilder.toQuery(createShardContext());
        assertThat(query, instanceOf(BooleanQuery.class));
        BooleanQuery booleanQuery = (BooleanQuery) query;
        assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(0));
        assertThat(booleanQuery.clauses().size(), equalTo(1));
        BooleanClause booleanClause = booleanQuery.clauses().get(0);
        assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.FILTER));
        assertThat(booleanClause.getQuery(), instanceOf(BooleanQuery.class));
        BooleanQuery innerBooleanQuery = (BooleanQuery) booleanClause.getQuery();
        //we didn't set minimum should match initially, but there are should clauses so it should be 1
        assertThat(innerBooleanQuery.getMinimumNumberShouldMatch(), equalTo(1));
        assertThat(innerBooleanQuery.clauses().size(), equalTo(2));
        BooleanClause innerBooleanClause1 = innerBooleanQuery.clauses().get(0);
        assertThat(innerBooleanClause1.getOccur(), equalTo(BooleanClause.Occur.MUST));
        assertThat(innerBooleanClause1.getQuery(), instanceOf(MatchAllDocsQuery.class));
        BooleanClause innerBooleanClause2 = innerBooleanQuery.clauses().get(1);
        assertThat(innerBooleanClause2.getOccur(), equalTo(BooleanClause.Occur.SHOULD));
        assertThat(innerBooleanClause2.getQuery(), instanceOf(MatchAllDocsQuery.class));
    }

    /** A minimum_should_match larger than the number of should clauses is kept as-is. */
    public void testMinShouldMatchBiggerThanNumberOfShouldClauses() throws Exception {
        BooleanQuery bq = (BooleanQuery) parseQuery(
            boolQuery()
                .should(termQuery("foo", "bar"))
                .should(termQuery("foo2", "bar2"))
                .minimumShouldMatch("3")).toQuery(createShardContext());
        assertEquals(3, bq.getMinimumNumberShouldMatch());
        bq = (BooleanQuery) parseQuery(
            boolQuery()
                .should(termQuery("foo", "bar"))
                .should(termQuery("foo2", "bar2"))
                .minimumShouldMatch(3)).toQuery(createShardContext());
        assertEquals(3, bq.getMinimumNumberShouldMatch());
    }

    /** disable_coord must not interfere with minimum_should_match. */
    public void testMinShouldMatchDisableCoord() throws Exception {
        BooleanQuery bq = (BooleanQuery) parseQuery(
            boolQuery()
                .should(termQuery("foo", "bar"))
                .should(termQuery("foo2", "bar2"))
                .minimumShouldMatch("3")
                .disableCoord(true)).toQuery(createShardContext());
        assertEquals(3, bq.getMinimumNumberShouldMatch());
    }

    /** Round-trips a full bool query through JSON and checks the parsed fields. */
    public void testFromJson() throws IOException {
        String query =
            "{" +
            "\"bool\" : {" +
            " \"must\" : [ {" +
            " \"term\" : {" +
            " \"user\" : {" +
            " \"value\" : \"kimchy\"," +
            " \"boost\" : 1.0" +
            " }" +
            " }" +
            " } ]," +
            " \"filter\" : [ {" +
            " \"term\" : {" +
            " \"tag\" : {" +
            " \"value\" : \"tech\"," +
            " \"boost\" : 1.0" +
            " }" +
            " }" +
            " } ]," +
            " \"must_not\" : [ {" +
            " \"range\" : {" +
            " \"age\" : {" +
            " \"from\" : 10," +
            " \"to\" : 20," +
            " \"include_lower\" : true," +
            " \"include_upper\" : true," +
            " \"boost\" : 1.0" +
            " }" +
            " }" +
            " } ]," +
            " \"should\" : [ {" +
            " \"term\" : {" +
            " \"tag\" : {" +
            " \"value\" : \"wow\"," +
            " \"boost\" : 1.0" +
            " }" +
            " }" +
            " }, {" +
            " \"term\" : {" +
            " \"tag\" : {" +
            " \"value\" : \"elasticsearch\"," +
            " \"boost\" : 1.0" +
            " }" +
            " }" +
            " } ]," +
            " \"disable_coord\" : false," +
            " \"adjust_pure_negative\" : true," +
            " \"minimum_should_match\" : \"23\"," +
            " \"boost\" : 42.0" +
            "}" +
            "}";
        BoolQueryBuilder queryBuilder = (BoolQueryBuilder) parseQuery(query);
        checkGeneratedJson(query, queryBuilder);
        assertEquals(query, 42, queryBuilder.boost, 0.00001);
        assertEquals(query, "23", queryBuilder.minimumShouldMatch());
        assertEquals(query, "kimchy", ((TermQueryBuilder)queryBuilder.must().get(0)).value());
    }

    /**
     * we ignore empty query bodies if we are not in strict mode
     */
    public void testFromJsonEmptyQueryBody() throws IOException {
        String query =
            "{" +
            "\"bool\" : {" +
            " \"must\" : [ { } ]," +
            " \"filter\" : { }," +
            " \"must_not\" : [ { \"constant_score\" : {\"filter\" : { } } } ]" +
            "}" +
            "}";
        BoolQueryBuilder queryBuilder = (BoolQueryBuilder) parseQuery(query);
        assertEquals(query, 0, queryBuilder.must().size());
        assertEquals(query, 0, queryBuilder.filter().size());
        assertEquals(query, 0, queryBuilder.mustNot().size());
        assertEquals(query, 0, queryBuilder.should().size());
        // we should have deprecation warning headers regardless of throwing an exception
        assertWarnings("query malformed, empty clause found at [1:27]",
            "query malformed, empty clause found at [1:46]",
            "query malformed, empty clause found at [1:100]");
    }

    /**
     * We deprecated `minimum_number_should_match`, it should still parse correctly but add a warning header
     */
    public void testMinimumNumberShouldMatchDeprecated() throws IOException {
        String query =
            "{" +
            "\"bool\" : {" +
            " \"should\" : { " +
            " \"term\" : {" +
            " \"tag\" : {" +
            " \"value\" : \"wow\"," +
            " \"boost\" : 1.0" +
            " }" +
            " } " +
            " }," +
            " \"minimum_number_should_match\" : 1" +
            "}}";
        BoolQueryBuilder queryBuilder = (BoolQueryBuilder) parseQuery(query);
        assertEquals(query, 1, queryBuilder.should().size());
        assertEquals(query, "1", queryBuilder.minimumShouldMatch());
        // we should have deprecation warning headers regardless of throwing an exception
        assertWarnings("Deprecated field [minimum_number_should_match] used, expected [minimum_should_match] instead");
    }

    /**
     * test that unknown query names in the clauses throw an error
     */
    public void testUnknownQueryName() throws IOException {
        String query = "{\"bool\" : {\"must\" : { \"unknown_query\" : { } } } }";
        ParsingException ex = expectThrows(ParsingException.class, () -> parseQuery(query));
        assertEquals("no [query] registered for [unknown_query]", ex.getMessage());
    }

    /**
     * test that two queries in object throws error
     */
    public void testTooManyQueriesInObject() throws IOException {
        String clauseType = randomFrom("must", "should", "must_not", "filter");
        // should also throw error if invalid query is preceded by a valid one
        String query = "{\n" +
            " \"bool\": {\n" +
            " \"" + clauseType + "\": {\n" +
            " \"match\": {\n" +
            " \"foo\": \"bar\"\n" +
            " },\n" +
            " \"match\": {\n" +
            " \"baz\": \"buzz\"\n" +
            " }\n" +
            " }\n" +
            " }\n" +
            "}";
        ParsingException ex = expectThrows(ParsingException.class, () -> parseQuery(query));
        assertEquals("[match] malformed query, expected [END_OBJECT] but found [FIELD_NAME]", ex.getMessage());
    }

    /**
     * Rewriting a bool query must rewrite each clause; a bool query that ends
     * up with no clauses rewrites to match_all, and a builder with nothing to
     * rewrite is returned unchanged (same instance).
     */
    public void testRewrite() throws IOException {
        BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
        boolean mustRewrite = false;
        if (randomBoolean()) {
            mustRewrite = true;
            boolQueryBuilder.must(new WrapperQueryBuilder(new TermsQueryBuilder("foo", "must").toString()));
        }
        if (randomBoolean()) {
            mustRewrite = true;
            boolQueryBuilder.should(new WrapperQueryBuilder(new TermsQueryBuilder("foo", "should").toString()));
        }
        if (randomBoolean()) {
            mustRewrite = true;
            boolQueryBuilder.filter(new WrapperQueryBuilder(new TermsQueryBuilder("foo", "filter").toString()));
        }
        if (randomBoolean()) {
            mustRewrite = true;
            boolQueryBuilder.mustNot(new WrapperQueryBuilder(new TermsQueryBuilder("foo", "must_not").toString()));
        }
        if (mustRewrite == false && randomBoolean()) {
            boolQueryBuilder.must(new TermsQueryBuilder("foo", "no_rewrite"));
        }
        QueryBuilder rewritten = boolQueryBuilder.rewrite(createShardContext());
        if (mustRewrite == false && boolQueryBuilder.must().isEmpty()) {
            // if it's empty we rewrite to match all
            assertEquals(rewritten, new MatchAllQueryBuilder());
        } else {
            BoolQueryBuilder rewrite = (BoolQueryBuilder) rewritten;
            if (mustRewrite) {
                assertNotSame(rewrite, boolQueryBuilder);
                if (boolQueryBuilder.must().isEmpty() == false) {
                    assertEquals(new TermsQueryBuilder("foo", "must"), rewrite.must().get(0));
                }
                if (boolQueryBuilder.should().isEmpty() == false) {
                    assertEquals(new TermsQueryBuilder("foo", "should"), rewrite.should().get(0));
                }
                if (boolQueryBuilder.mustNot().isEmpty() == false) {
                    assertEquals(new TermsQueryBuilder("foo", "must_not"), rewrite.mustNot().get(0));
                }
                if (boolQueryBuilder.filter().isEmpty() == false) {
                    assertEquals(new TermsQueryBuilder("foo", "filter"), rewrite.filter().get(0));
                }
            } else {
                assertSame(rewrite, boolQueryBuilder);
                if (boolQueryBuilder.must().isEmpty() == false) {
                    assertSame(boolQueryBuilder.must().get(0), rewrite.must().get(0));
                }
            }
        }
    }

    /** Nested wrappers must be rewritten to a fix-point, not just one level. */
    public void testRewriteMultipleTimes() throws IOException {
        BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
        boolQueryBuilder.must(new WrapperQueryBuilder(new WrapperQueryBuilder(new MatchAllQueryBuilder().toString()).toString()));
        QueryBuilder rewritten = boolQueryBuilder.rewrite(createShardContext());
        BoolQueryBuilder expected = new BoolQueryBuilder();
        expected.must(new MatchAllQueryBuilder());
        assertEquals(expected, rewritten);
        expected = new BoolQueryBuilder();
        expected.must(new MatchAllQueryBuilder());
        QueryBuilder rewrittenAgain = rewritten.rewrite(createShardContext());
        assertEquals(rewrittenAgain, expected);
        assertEquals(QueryBuilder.rewriteQuery(boolQueryBuilder, createShardContext()), expected);
    }
}
| |
/*
Derby - Class org.apache.derby.impl.store.raw.data.TempRAFContainer
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.impl.store.raw.data;
import org.apache.derby.impl.store.raw.data.BaseContainerHandle;
import org.apache.derby.impl.store.raw.data.BasePage;
import org.apache.derby.iapi.services.cache.Cacheable;
import org.apache.derby.iapi.services.sanity.SanityManager;
import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.iapi.store.raw.xact.RawTransaction;
import org.apache.derby.iapi.store.raw.ContainerHandle;
import org.apache.derby.iapi.store.raw.ContainerKey;
import org.apache.derby.iapi.store.raw.Page;
import org.apache.derby.iapi.store.raw.log.LogInstant;
import org.apache.derby.iapi.store.raw.data.RawContainerHandle;
import org.apache.derby.io.StorageFactory;
import org.apache.derby.io.StorageFile;
import java.io.IOException;
/**
needsSync is never true - DONE
An exception never marks the store as corrupt
clean() does not stubbify
preAllocate() does nothing - DONE
getFileName() returns a file in the tmp directory - DONE
flushAll does nothing - DONE
file descriptor is never synced
*/
class TempRAFContainer extends RAFContainer {
// Count of users of this container; not referenced in this class —
// presumably maintained by callers or subclass machinery (TODO confirm).
protected int inUseCount;

/** Creates a temporary container backed by the given data file factory. */
TempRAFContainer(BaseDataFileFactory factory) {
    super(factory);
}
/**
 Sets the identity of this cacheable. If the key does not refer to the
 temporary segment this object cannot serve it, so a regular
 {@link RAFContainer} is created and used instead.

 @exception StandardException Standard Derby error policy
 */
public Cacheable setIdentity(Object key) throws StandardException {
    ContainerKey containerKey = (ContainerKey) key;
    if (containerKey.getSegmentId() == ContainerHandle.TEMPORARY_SEGMENT) {
        return super.setIdentity(containerKey);
    }
    // not a temporary segment: hand off to a regular file container
    return new RAFContainer(dataFactory).setIdent(containerKey);
}
/**
 Creates this cacheable under the given identity. If the key does not refer
 to the temporary segment, creation is delegated to a regular
 {@link RAFContainer}.

 @exception StandardException Standard Derby error policy
 */
public Cacheable createIdentity(Object key, Object createParameter) throws StandardException {
    ContainerKey containerKey = (ContainerKey) key;
    if (containerKey.getSegmentId() == ContainerHandle.TEMPORARY_SEGMENT) {
        return createIdent(containerKey, createParameter);
    }
    // not a temporary segment: hand off to a regular file container
    return new RAFContainer(dataFactory).createIdentity(containerKey, createParameter);
}
/**
 Drops this temporary container and deletes its backing file immediately.
 Temp containers are unlogged, so the log instant and stub arguments are
 effectively ignored (no stub file is written).

 @exception StandardException Standard Derby error policy
 */
public void removeContainer(LogInstant instant, boolean leaveStub) throws StandardException
{
    // discard all of my pages in the cache
    pageCache.discard(identity);
    synchronized(this) {
        // prevent anybody from looking at this container again
        setDroppedState(true);
        setCommittedDropState(true);
        setDirty(false);
        needsSync = false;
    }
    // delete the backing temp file on disk
    removeFile(getFileName(identity, false, false, false));
}
/**
Preallocate page. Since we don't sync when we write page anyway, no
need to preallocate page.
*/
protected int preAllocate(long lastPreallocPagenum, int preAllocSize)
{
return 0;
}
/**
Write the page, if it's within range of the current page range of the container.
If we do write it then don't request that it be synced.
@exception StandardException Standard Derby error policy
*/
protected void writePage(long pageNumber, byte[] pageData, boolean syncPage) throws IOException, StandardException {
if (!this.getDroppedState()) {
super.writePage(pageNumber, pageData, false);
}
needsSync = false;
}
StorageFile getFileName(ContainerKey identity, boolean stub,
boolean errorOK, boolean tryAlternatePath)
{
return privGetFileName( identity, stub, errorOK, tryAlternatePath);
}
protected StorageFile privGetFileName(ContainerKey identity, boolean stub,
boolean errorOK, boolean tryAlternatePath)
{
return dataFactory.storageFactory.newStorageFile( dataFactory.storageFactory.getTempDir(),
"T" + identity.getContainerId() + ".tmp");
}
/**
Add a page without locking the container, only one user will be accessing this
table at a time.
@exception StandardException Standard Derby error policy
*/
public Page addPage(BaseContainerHandle handle, boolean isOverflow) throws StandardException {
BasePage newPage = newPage(handle, (RawTransaction) null, handle, isOverflow);
if (SanityManager.DEBUG) {
SanityManager.ASSERT(newPage.isLatched());
}
return newPage;
}
/**
@exception StandardException Standard Derby error policy
*/
public void truncate(BaseContainerHandle handle) throws StandardException {
// stop anyone from writing any of my pages out
synchronized(this)
{
setDroppedState(true);
setCommittedDropState(true);
setDirty(false);
needsSync = false;
}
// discard all of my pages in the cache
while (pageCache.discard(identity) != true)
;
removeFile(getFileName(identity, false, true, false));
createIdent(identity, this);
addPage(handle, false).unlatch();
}
/**
Lock the container and mark the container as in-use by this container handle.
@param droppedOK if true, use this container even if it is dropped.,
@return true if the container can be used, false if it has been dropped
since the lock was requested and droppedOK is not true.
@exception StandardException I cannot be opened for update.
*/
protected boolean use(BaseContainerHandle handle, boolean forUpdate,
boolean droppedOK)
throws StandardException {
if (super.use(handle, forUpdate, droppedOK)) {
inUseCount++;
return true;
}
return false;
}
/**
Discontinue use of this container. Note that the unlockContainer
call made from this method may not release any locks. The container
lock may be held until the end of the transaction.
*/
protected void letGo(BaseContainerHandle handle) {
inUseCount--;
super.letGo(handle);
}
/**
Returns true if only a single handle is connected to this container.
*/
public boolean isSingleUser() {
return inUseCount == 1;
}
}
| |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.kernel.impl.api;
import org.act.temporalProperty.query.range.TimeRangeQuery;
import java.util.List;
import org.neo4j.collection.primitive.PrimitiveIntIterator;
import org.neo4j.collection.primitive.PrimitiveLongIterator;
import org.neo4j.cursor.Cursor;
import org.neo4j.kernel.api.cursor.NodeItem;
import org.neo4j.kernel.api.cursor.RelationshipItem;
import org.neo4j.kernel.api.exceptions.EntityNotFoundException;
import org.neo4j.kernel.api.exceptions.PropertyNotFoundException;
import org.neo4j.kernel.api.exceptions.index.IndexNotFoundKernelException;
import org.neo4j.kernel.api.exceptions.schema.ConstraintValidationKernelException;
import org.neo4j.kernel.api.exceptions.schema.IndexBrokenKernelException;
import org.neo4j.kernel.api.index.IndexDescriptor;
import org.neo4j.kernel.api.properties.DefinedProperty;
import org.neo4j.kernel.api.properties.Property;
import org.neo4j.kernel.api.txstate.TxStateHolder;
import org.neo4j.kernel.guard.Guard;
import org.neo4j.kernel.impl.api.operations.EntityReadOperations;
import org.neo4j.kernel.impl.api.operations.EntityWriteOperations;
import org.neo4j.kernel.impl.api.store.RelationshipIterator;
import org.neo4j.kernel.impl.api.store.StoreStatement;
import org.neo4j.temporal.IntervalEntry;
import org.neo4j.temporal.TemporalIndexManager;
import org.neo4j.temporal.TemporalPropertyReadOperation;
import org.neo4j.temporal.TemporalPropertyWriteOperation;
/**
 * Decorator over {@link EntityWriteOperations} and {@link EntityReadOperations}
 * that calls {@link Guard#check()} before forwarding every operation to the
 * wrapped delegate, so a guarded statement is stopped before it performs any
 * further work.  No behavior is added beyond the guard check; all results come
 * straight from the delegates.
 */
public class GuardingStatementOperations implements
        EntityWriteOperations,
        EntityReadOperations
{
    private final EntityWriteOperations writes;
    private final EntityReadOperations reads;
    private final Guard guard;

    public GuardingStatementOperations(
            EntityWriteOperations entityWriteDelegate,
            EntityReadOperations entityReadDelegate,
            Guard guard)
    {
        this.writes = entityWriteDelegate;
        this.reads = entityReadDelegate;
        this.guard = guard;
    }

    // ---- temporal property operations ----

    @Override
    public List<IntervalEntry> getTemporalPropertyByIndex(KernelStatement statement, TemporalIndexManager.PropertyValueIntervalBuilder builder)
    {
        guard.check();
        return reads.getTemporalPropertyByIndex(statement, builder);
    }

    @Override
    public Object nodeGetTemporalProperty(KernelStatement statement, TemporalPropertyReadOperation query) throws EntityNotFoundException, PropertyNotFoundException
    {
        guard.check();
        return reads.nodeGetTemporalProperty(statement, query);
    }

    @Override
    public Object relationshipGetTemporalProperty(KernelStatement statement, TemporalPropertyReadOperation query) throws EntityNotFoundException, PropertyNotFoundException
    {
        guard.check();
        return reads.relationshipGetTemporalProperty(statement, query);
    }

    @Override
    public void nodeSetTemporalProperty(KernelStatement statement, TemporalPropertyWriteOperation operation) throws EntityNotFoundException, ConstraintValidationKernelException
    {
        guard.check();
        writes.nodeSetTemporalProperty(statement, operation);
    }

    @Override
    public void relationshipSetTemporalProperty(KernelStatement statement, TemporalPropertyWriteOperation operation) throws EntityNotFoundException, ConstraintValidationKernelException
    {
        guard.check();
        writes.relationshipSetTemporalProperty(statement, operation);
    }

    // ---- entity write operations ----

    @Override
    public long relationshipCreate(KernelStatement statement,
            int relationshipTypeId,
            long startNodeId,
            long endNodeId)
            throws EntityNotFoundException
    {
        guard.check();
        return writes.relationshipCreate(statement, relationshipTypeId, startNodeId, endNodeId);
    }

    @Override
    public long nodeCreate(KernelStatement statement)
    {
        guard.check();
        return writes.nodeCreate(statement);
    }

    @Override
    public void nodeDelete(KernelStatement state, long nodeId) throws EntityNotFoundException
    {
        guard.check();
        writes.nodeDelete(state, nodeId);
    }

    @Override
    public int nodeDetachDelete(KernelStatement state, long nodeId) throws EntityNotFoundException
    {
        guard.check();
        return writes.nodeDetachDelete(state, nodeId);
    }

    @Override
    public void relationshipDelete(KernelStatement state, long relationshipId) throws EntityNotFoundException
    {
        guard.check();
        writes.relationshipDelete(state, relationshipId);
    }

    @Override
    public boolean nodeAddLabel(KernelStatement state, long nodeId, int labelId)
            throws ConstraintValidationKernelException, EntityNotFoundException
    {
        guard.check();
        return writes.nodeAddLabel(state, nodeId, labelId);
    }

    @Override
    public boolean nodeRemoveLabel(KernelStatement state, long nodeId, int labelId) throws EntityNotFoundException
    {
        guard.check();
        return writes.nodeRemoveLabel(state, nodeId, labelId);
    }

    @Override
    public Property nodeSetProperty(KernelStatement state, long nodeId, DefinedProperty property)
            throws ConstraintValidationKernelException, EntityNotFoundException
    {
        guard.check();
        return writes.nodeSetProperty(state, nodeId, property);
    }

    @Override
    public Property relationshipSetProperty(KernelStatement state,
            long relationshipId,
            DefinedProperty property) throws EntityNotFoundException
    {
        guard.check();
        return writes.relationshipSetProperty(state, relationshipId, property);
    }

    @Override
    public Property graphSetProperty(KernelStatement state, DefinedProperty property)
    {
        guard.check();
        return writes.graphSetProperty(state, property);
    }

    @Override
    public Property nodeRemoveProperty(KernelStatement state, long nodeId, int propertyKeyId)
            throws EntityNotFoundException
    {
        guard.check();
        return writes.nodeRemoveProperty(state, nodeId, propertyKeyId);
    }

    @Override
    public Property relationshipRemoveProperty(KernelStatement state,
            long relationshipId,
            int propertyKeyId) throws EntityNotFoundException
    {
        guard.check();
        return writes.relationshipRemoveProperty(state, relationshipId, propertyKeyId);
    }

    @Override
    public Property graphRemoveProperty(KernelStatement state, int propertyKeyId)
    {
        guard.check();
        return writes.graphRemoveProperty(state, propertyKeyId);
    }

    // ---- entity read operations ----

    @Override
    public PrimitiveLongIterator nodesGetForLabel(KernelStatement state, int labelId)
    {
        guard.check();
        return reads.nodesGetForLabel(state, labelId);
    }

    @Override
    public PrimitiveLongIterator nodesGetFromIndexSeek(KernelStatement state, IndexDescriptor index, Object value)
            throws IndexNotFoundKernelException
    {
        guard.check();
        return reads.nodesGetFromIndexSeek(state, index, value);
    }

    @Override
    public PrimitiveLongIterator nodesGetFromIndexRangeSeekByNumber(KernelStatement state,
            IndexDescriptor index,
            Number lower, boolean includeLower,
            Number upper, boolean includeUpper)
            throws IndexNotFoundKernelException
    {
        guard.check();
        return reads.nodesGetFromIndexRangeSeekByNumber(state, index, lower, includeLower, upper, includeUpper);
    }

    @Override
    public PrimitiveLongIterator nodesGetFromIndexRangeSeekByString(KernelStatement state,
            IndexDescriptor index,
            String lower, boolean includeLower,
            String upper, boolean includeUpper)
            throws IndexNotFoundKernelException
    {
        guard.check();
        return reads.nodesGetFromIndexRangeSeekByString(state, index, lower, includeLower, upper, includeUpper);
    }

    @Override
    public PrimitiveLongIterator nodesGetFromIndexRangeSeekByPrefix(KernelStatement state, IndexDescriptor index,
            String prefix) throws IndexNotFoundKernelException
    {
        guard.check();
        return reads.nodesGetFromIndexRangeSeekByPrefix(state, index, prefix);
    }

    @Override
    public PrimitiveLongIterator nodesGetFromIndexScan(KernelStatement state, IndexDescriptor index)
            throws IndexNotFoundKernelException
    {
        guard.check();
        return reads.nodesGetFromIndexScan(state, index);
    }

    @Override
    public long nodeGetFromUniqueIndexSeek(KernelStatement state, IndexDescriptor index, Object value)
            throws IndexNotFoundKernelException, IndexBrokenKernelException
    {
        guard.check();
        return reads.nodeGetFromUniqueIndexSeek(state, index, value);
    }

    @Override
    public boolean graphHasProperty(KernelStatement state, int propertyKeyId)
    {
        guard.check();
        return reads.graphHasProperty(state, propertyKeyId);
    }

    @Override
    public Object graphGetProperty(KernelStatement state, int propertyKeyId)
    {
        guard.check();
        return reads.graphGetProperty(state, propertyKeyId);
    }

    @Override
    public PrimitiveIntIterator graphGetPropertyKeys(KernelStatement state)
    {
        guard.check();
        return reads.graphGetPropertyKeys(state);
    }

    @Override
    public PrimitiveLongIterator nodesGetAll(KernelStatement state)
    {
        guard.check();
        return reads.nodesGetAll(state);
    }

    @Override
    public PrimitiveLongIterator relationshipsGetAll(KernelStatement state)
    {
        guard.check();
        return reads.relationshipsGetAll(state);
    }

    @Override
    public <EXCEPTION extends Exception> void relationshipVisit(KernelStatement statement, long relId,
            RelationshipVisitor<EXCEPTION> visitor)
            throws EntityNotFoundException, EXCEPTION
    {
        guard.check();
        reads.relationshipVisit(statement, relId, visitor);
    }

    @Override
    public Cursor<NodeItem> nodeCursorById(KernelStatement statement, long nodeId) throws EntityNotFoundException
    {
        guard.check();
        return reads.nodeCursorById(statement, nodeId);
    }

    @Override
    public Cursor<NodeItem> nodeCursor(KernelStatement statement, long nodeId)
    {
        guard.check();
        return reads.nodeCursor(statement, nodeId);
    }

    @Override
    public Cursor<NodeItem> nodeCursor(TxStateHolder txStateHolder, StoreStatement statement, long nodeId)
    {
        guard.check();
        return reads.nodeCursor(txStateHolder, statement, nodeId);
    }

    @Override
    public Cursor<RelationshipItem> relationshipCursorById(KernelStatement statement, long relId)
            throws EntityNotFoundException
    {
        guard.check();
        return reads.relationshipCursorById(statement, relId);
    }

    @Override
    public Cursor<RelationshipItem> relationshipCursor(KernelStatement statement, long relId)
    {
        guard.check();
        return reads.relationshipCursor(statement, relId);
    }

    @Override
    public Cursor<RelationshipItem> relationshipCursor(TxStateHolder txStateHolder,
            StoreStatement statement,
            long relId)
    {
        guard.check();
        return reads.relationshipCursor(txStateHolder, statement, relId);
    }

    @Override
    public Cursor<NodeItem> nodeCursorGetAll(KernelStatement statement)
    {
        guard.check();
        return reads.nodeCursorGetAll(statement);
    }

    @Override
    public Cursor<RelationshipItem> relationshipCursorGetAll(KernelStatement statement)
    {
        guard.check();
        return reads.relationshipCursorGetAll(statement);
    }

    @Override
    public Cursor<NodeItem> nodeCursorGetForLabel(KernelStatement statement, int labelId)
    {
        guard.check();
        return reads.nodeCursorGetForLabel(statement, labelId);
    }

    @Override
    public Cursor<NodeItem> nodeCursorGetFromIndexSeek(KernelStatement statement, IndexDescriptor index, Object value)
            throws IndexNotFoundKernelException
    {
        guard.check();
        return reads.nodeCursorGetFromIndexSeek(statement, index, value);
    }

    @Override
    public Cursor<NodeItem> nodeCursorGetFromIndexScan(KernelStatement statement, IndexDescriptor index)
            throws IndexNotFoundKernelException
    {
        guard.check();
        return reads.nodeCursorGetFromIndexScan(statement, index);
    }

    @Override
    public Cursor<NodeItem> nodeCursorGetFromIndexRangeSeekByNumber(KernelStatement statement,
            IndexDescriptor index,
            Number lower, boolean includeLower,
            Number upper, boolean includeUpper)
            throws IndexNotFoundKernelException
    {
        guard.check();
        return reads.nodeCursorGetFromIndexRangeSeekByNumber(statement, index, lower, includeLower, upper,
                includeUpper);
    }

    @Override
    public Cursor<NodeItem> nodeCursorGetFromIndexRangeSeekByString(KernelStatement statement,
            IndexDescriptor index,
            String lower, boolean includeLower,
            String upper, boolean includeUpper)
            throws IndexNotFoundKernelException
    {
        guard.check();
        return reads.nodeCursorGetFromIndexRangeSeekByString(statement, index, lower, includeLower, upper,
                includeUpper);
    }

    @Override
    public Cursor<NodeItem> nodeCursorGetFromIndexSeekByPrefix(KernelStatement statement,
            IndexDescriptor index,
            String prefix) throws IndexNotFoundKernelException
    {
        guard.check();
        return reads.nodeCursorGetFromIndexSeekByPrefix(statement, index, prefix);
    }

    @Override
    public Cursor<NodeItem> nodeCursorGetFromIndexRangeSeekByPrefix(KernelStatement statement,
            IndexDescriptor index,
            String prefix) throws IndexNotFoundKernelException
    {
        guard.check();
        return reads.nodeCursorGetFromIndexRangeSeekByPrefix(statement, index, prefix);
    }

    @Override
    public Cursor<NodeItem> nodeCursorGetFromUniqueIndexSeek(KernelStatement statement,
            IndexDescriptor index,
            Object value) throws IndexNotFoundKernelException, IndexBrokenKernelException
    {
        guard.check();
        return reads.nodeCursorGetFromUniqueIndexSeek(statement, index, value);
    }

    /**
     * {@link RelationshipIterator} wrapper that re-checks the guard on each
     * {@code relationshipVisit} call.  Note that {@code hasNext()}/{@code next()}
     * forward without a guard check, and this class is not referenced anywhere
     * in the visible code of this file — presumably retained for callers
     * elsewhere; confirm before removing.
     */
    private static class GuardedRelationshipIterator implements RelationshipIterator
    {
        private final Guard guard;
        private final RelationshipIterator delegate;

        public GuardedRelationshipIterator(Guard guard, RelationshipIterator delegate)
        {
            this.guard = guard;
            this.delegate = delegate;
        }

        @Override
        public <EXCEPTION extends Exception> boolean relationshipVisit(long relationshipId,
                RelationshipVisitor<EXCEPTION> visitor) throws EXCEPTION
        {
            guard.check();
            return delegate.relationshipVisit(relationshipId, visitor);
        }

        @Override
        public boolean hasNext()
        {
            return delegate.hasNext();
        }

        @Override
        public long next()
        {
            return delegate.next();
        }
    }
}
| |
/*
* The MIT License
*
* Copyright (c) 2016, CloudBees, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.jenkinsci.remoting.engine;
import hudson.remoting.Channel;
import hudson.remoting.ChannelBuilder;
import java.io.IOException;
import java.net.Socket;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.annotation.concurrent.NotThreadSafe;
import org.jenkinsci.remoting.protocol.impl.ConnectionRefusalException;
/**
* Represents the state of a connection event. This object should not be retained by the
* {@link JnlpConnectionStateListener}
*
* @since FIXME
*/
@NotThreadSafe
public class JnlpConnectionState {
/**
* The current iterator being used to process the listener notifications.
*/
private static final ThreadLocal<Iterator<JnlpConnectionStateListener>> fireIterator
= new ThreadLocal<Iterator<JnlpConnectionStateListener>>();
/**
* The property name for the secret key.
*/
public static final String SECRET_KEY = "Secret-Key";
/**
* The property name for the client name key.
*/
public static final String CLIENT_NAME_KEY = "Node-Name";
/**
* The property name for the cookie key.
*/
public static final String COOKIE_KEY = "Cookie";
/**
* Socket connection to the agent.
*/
@Nonnull
private final Socket socket;
/**
* The {@link JnlpConnectionStateListener} instances that are still interested in this connection event.
*/
private final List<JnlpConnectionStateListener> listeners;
/**
* The connection properties, this is {@code null} in
* {@link JnlpConnectionStateListener#beforeProperties(JnlpConnectionState)} and non-null thereafter.
*/
@CheckForNull
private Map<String, String> properties;
/**
* The {@link ChannelBuilder}, this is {@code null} except in
* {@link JnlpConnectionStateListener#beforeChannel(JnlpConnectionState)}.
*/
@CheckForNull
private ChannelBuilder channelBuilder;
/**
* The {@link Channel}, this is {@code null} until
* {@link JnlpConnectionStateListener#afterChannel(JnlpConnectionState)}.
*/
@CheckForNull
private Channel channel;
/**
* Holds the reason for connection rejection
*/
@CheckForNull
private ConnectionRefusalException rejection;
/**
* The reason for the channel being closed (if supplied).
*/
@CheckForNull
private IOException closeCause;
/**
* The current state in the event lifecycle.
*/
private State lifecycle = State.INITIALIZED;
/**
* Any connection specific state that the listener that has {@link #approve()} for the connection wants to
* track between callbacks.
*/
@CheckForNull
private ListenerState stash;
/**
* Constructor.
*
* @param socket the {@link Socket}.
* @param listeners the {@link JnlpConnectionStateListener} instances.
*/
    protected JnlpConnectionState(@Nonnull Socket socket, List<? extends JnlpConnectionStateListener> listeners) {
        this.socket = socket;
        // Defensive copy: listeners are removed from this list as they ignore()
        // the event or lose ownership, so the caller's list must not be mutated.
        this.listeners = new ArrayList<JnlpConnectionStateListener>(listeners);
    }
/**
* Gets the socket that the connection is on.
*
* @return the socket that the connection is on.
*/
    @Nonnull
    public Socket getSocket() {
        // The socket is final and set in the constructor, so this accessor is
        // valid in every lifecycle state.
        return socket;
    }
/**
* Gets the connection properties.
*
* @return the connection properties.
* @throws IllegalStateException if invoked before
* {@link JnlpConnectionStateListener#afterProperties(JnlpConnectionState)}
*/
public Map<String, String> getProperties() {
if (lifecycle.compareTo(State.AFTER_PROPERTIES) < 0) {
throw new IllegalStateException("The connection properties have not been exchanged yet");
}
return properties;
}
/**
* Gets the named connection property.
*
* @param name the property name.
* @return the connection property.
* @throws IllegalStateException if invoked before
* {@link JnlpConnectionStateListener#afterProperties(JnlpConnectionState)}
*/
public String getProperty(String name) {
if (lifecycle.compareTo(State.AFTER_PROPERTIES) < 0) {
throw new IllegalStateException("The connection properties have not been exchanged yet");
}
return properties == null ? null : properties.get(name);
}
/**
* Gets the {@link ChannelBuilder} that will be used to create the connection's {@link Channel}.
*
* @return the {@link ChannelBuilder}
* @throws IllegalStateException if invoked outside of
* {@link JnlpConnectionStateListener#beforeChannel(JnlpConnectionState)}
*/
public ChannelBuilder getChannelBuilder() {
if (lifecycle.compareTo(State.APPROVED) < 0) {
throw new IllegalStateException("The connection has not been approved yet");
}
if (lifecycle.compareTo(State.AFTER_CHANNEL) >= 0) {
throw new IllegalStateException("The channel has already been built");
}
return channelBuilder;
}
/**
* Gets the connection's {@link Channel}.
*
* @return the {@link Channel} (may be closed already), may be {@code null} in
* {@link JnlpConnectionStateListener#afterDisconnect(JnlpConnectionState)} if the socket was closed by
* the client.
* @throws IllegalStateException if invoked before
* {@link JnlpConnectionStateListener#afterChannel(JnlpConnectionState)}
*/
public Channel getChannel() {
if (lifecycle.compareTo(State.AFTER_CHANNEL) < 0) {
throw new IllegalStateException("The channel has not been built yet");
}
return channel;
}
/**
* Gets the reason for the channel being closed if available.
*
* @return the reason or {@code null} if termination was normal.
* @throws IllegalStateException if invoked before
* {@link JnlpConnectionStateListener#channelClosed(JnlpConnectionState)}
*/
@CheckForNull
public IOException getCloseCause() {
if (lifecycle.compareTo(State.CHANNEL_CLOSED) < 0) {
throw new IllegalStateException("The channel has not been closed yet");
}
return closeCause;
}
/**
* Signals that the current {@link JnlpConnectionStateListener} is not interested in this event any more. If all
* {@link JnlpConnectionStateListener} implementations {@link #ignore()} the event then the connection will be
* rejected.
*
* @throws IllegalStateException if invoked outside of
* {@link JnlpConnectionStateListener#beforeProperties(JnlpConnectionState)} or
* {@link JnlpConnectionStateListener#afterProperties(JnlpConnectionState)}.
*/
public void ignore() {
if (lifecycle.compareTo(State.AFTER_PROPERTIES) > 0) {
throw new IllegalStateException("Events cannot be ignored after approval/rejection");
}
Iterator<JnlpConnectionStateListener> iterator = JnlpConnectionState.fireIterator.get();
if (iterator == null) {
throw new IllegalStateException(
"Events can only be ignored from within the JnlpConnectionStateListener notification methods");
}
iterator.remove();
}
/**
* Signals that the current {@link JnlpConnectionStateListener} is declaring ownership of this event, approving
* the connection and all other {@link JnlpConnectionStateListener} instances will now be ignored.
* This method must be called by at least one {@link JnlpConnectionStateListener} or the connection will be
* rejected.
*
* @throws IllegalStateException if invoked outside of
* {@link JnlpConnectionStateListener#beforeProperties(JnlpConnectionState)} or
* {@link JnlpConnectionStateListener#afterProperties(JnlpConnectionState)}.
*/
    public void approve() {
        if (lifecycle.compareTo(State.AFTER_PROPERTIES) > 0) {
            throw new IllegalStateException("Events cannot be approved after approval/rejection");
        }
        // fire() observes this state transition and retains only the approving
        // listener for all subsequent events on this connection.
        lifecycle = State.APPROVED;
    }
/**
* Signals that the current {@link JnlpConnectionStateListener} is declaring ownership of this event, rejecting
* the connection and all other {@link JnlpConnectionStateListener} instances will now be ignored.
*
* @throws IllegalStateException if invoked outside of
* {@link JnlpConnectionStateListener#beforeProperties(JnlpConnectionState)} or
* {@link JnlpConnectionStateListener#afterProperties(JnlpConnectionState)}.
*/
    public void reject(ConnectionRefusalException reason) {
        if (lifecycle.compareTo(State.AFTER_PROPERTIES) > 0) {
            throw new IllegalStateException("Events cannot be rejected after approval/rejection");
        }
        // fire() observes this state transition and retains only the rejecting
        // listener; the stored reason is thrown by fireBeforeProperties /
        // fireAfterProperties.
        lifecycle = State.REJECTED;
        rejection = reason;
    }
/**
* Retrieves the previously stashed state.
* @param clazz the expected class of the stashed state.
* @param <S> the expected class of the stashed state.
* @return the stashed state.
* @throws IllegalStateException if invoked before {@link #approve()}
* @see #setStash(ListenerState)
*/
@CheckForNull
public <S extends ListenerState> S getStash(Class<S> clazz) {
if (lifecycle.compareTo(State.APPROVED) < 0) {
throw new IllegalStateException("The connection has not been approved yet");
}
return clazz.cast(stash);
}
/**
* Stores some listener specific state for later retrieval.
*
* @param stash the state to stash.
* @param <S> the expected class of the stashed state.
* @throws IllegalStateException if invoked before {@link #approve()}
* @see #getStash(Class)
*/
public <S extends ListenerState> void setStash(@CheckForNull S stash) {
if (lifecycle.compareTo(State.APPROVED) < 0) {
throw new IllegalStateException("The connection has not been approved yet");
}
this.stash = stash;
}
/**
* Encapsulates the common event dispatch logic.
*
* @param handler the logic to apply.
*/
    private void fire(EventHandler handler) {
        Iterator<JnlpConnectionStateListener> iterator = listeners.iterator();
        // Publish the live iterator via the thread-local so that a listener
        // calling ignore() from inside its callback can remove itself.
        JnlpConnectionState.fireIterator.set(iterator);
        try {
            // Snapshot the lifecycle so we can detect a state change made by a
            // listener (via approve()/reject()) during its callback.
            final State lifecycle = this.lifecycle;
            while (iterator.hasNext()) {
                JnlpConnectionStateListener current = iterator.next();
                handler.invoke(current, this);
                if (lifecycle != this.lifecycle) {
                    // a listener has changed the state, thus they are the owner;
                    // drop every other listener and stop dispatching.
                    listeners.retainAll(Collections.singleton(current));
                    return;
                }
            }
        } finally {
            // Always clear the thread-local so ignore() fails fast outside callbacks.
            JnlpConnectionState.fireIterator.remove();
        }
    }
/**
* Advances the connection state to indicate that a connection has been "secured" and the property exchange
* is about to take place.
*
* @throws ConnectionRefusalException if the connection has been refused.
*/
    /*package*/ void fireBeforeProperties() throws ConnectionRefusalException {
        // Legal only as the very first event after construction.
        if (lifecycle != State.INITIALIZED) {
            throw new IllegalStateException("fireBeforeProperties cannot be invoked at lifecycle " + lifecycle);
        }
        lifecycle = State.BEFORE_PROPERTIES;
        // TODO fire(JnlpConnectionStateListener::beforeProperties); // Java 8
        fire(new EventHandler() {
            @Override
            public void invoke(JnlpConnectionStateListener listener, JnlpConnectionState event) {
                listener.beforeProperties(event);
            }
        });
        // check for early rejection: a listener may have called reject(), or every
        // listener may have ignored the event leaving nobody to own the connection.
        if (lifecycle == State.REJECTED || listeners.isEmpty()) {
            lifecycle = State.REJECTED;
            // Take-and-clear so the stored rejection reason is reported exactly once.
            ConnectionRefusalException rejection = this.rejection;
            this.rejection = null;
            if (rejection != null) {
                throw rejection;
            }
            throw new ConnectionRefusalException("No listeners interested in connection");
        }
    }
/**
* Advances the connection state to indicate that the property exchange has completed.
*
* @throws ConnectionRefusalException if the connection has been refused.
*/
    /*package*/ void fireAfterProperties(@Nonnull Map<String, String> properties) throws ConnectionRefusalException {
        // Must directly follow the before-properties phase.
        if (lifecycle != State.BEFORE_PROPERTIES) {
            throw new IllegalStateException("fireAfterProperties cannot be invoked at lifecycle " + lifecycle);
        }
        // Defensive copy so later mutation of the caller's map cannot leak in.
        this.properties = new HashMap<String, String>(properties);
        lifecycle = State.AFTER_PROPERTIES;
        // TODO fire(JnlpConnectionStateListener::afterProperties);
        fire(new EventHandler() {
            @Override
            public void invoke(JnlpConnectionStateListener listener, JnlpConnectionState event) {
                listener.afterProperties(event);
            }
        });
        // must have approval or else connection is rejected: unlike the
        // before-properties phase, a listener is required to approve() here.
        if (lifecycle != State.APPROVED || listeners.isEmpty()) {
            lifecycle = State.REJECTED;
            // Take-and-clear so the stored rejection reason is reported exactly once.
            ConnectionRefusalException rejection = this.rejection;
            this.rejection = null;
            if (rejection != null) {
                throw rejection;
            }
            throw new ConnectionRefusalException("No listeners interested in connection");
        }
    }
/**
* Advances the connection state to indicate that the channel is about to be created.
*
* @param builder the {@link ChannelBuilder} that will be used to create the channel.
*/
    /*package*/ void fireBeforeChannel(ChannelBuilder builder) {
        // Only an approved connection gets a channel.
        if (lifecycle != State.APPROVED) {
            throw new IllegalStateException("fireBeforeChannel cannot be invoked at lifecycle " + lifecycle);
        }
        lifecycle = State.BEFORE_CHANNEL;
        // Expose the builder so the owning listener can customize it; it is
        // cleared again in fireAfterChannel.
        this.channelBuilder = builder;
        // TODO fire(JnlpConnectionStateListener::beforeChannel);
        fire(new EventHandler() {
            @Override
            public void invoke(JnlpConnectionStateListener listener, JnlpConnectionState event) {
                listener.beforeChannel(event);
            }
        });
    }
/**
* Advances the connection state to indicate that the channel has been created.
*
* @param channel the {@link Channel} (may be closed already but should not unless there is a serious race with
* the remote).
*/
    /*package*/ void fireAfterChannel(Channel channel) {
        // Must directly follow the before-channel phase.
        if (lifecycle != State.BEFORE_CHANNEL) {
            throw new IllegalStateException("fireAfterChannel cannot be invoked at lifecycle " + lifecycle);
        }
        lifecycle = State.AFTER_CHANNEL;
        // The builder window has closed; release it and publish the channel.
        this.channelBuilder = null;
        this.channel = channel;
        // TODO fire(JnlpConnectionStateListener::afterChannel);
        fire(new EventHandler() {
            @Override
            public void invoke(JnlpConnectionStateListener listener, JnlpConnectionState event) {
                listener.afterChannel(event);
            }
        });
    }
/**
* Advances the connection state to indicate that the channel has been closed.
*
* @param cause the reason why the channel was closed or {@code null} if normally closed
*/
    /*package*/ void fireChannelClosed(@CheckForNull IOException cause) {
        // Valid any time at or after channel construction started.
        if (lifecycle.compareTo(State.BEFORE_CHANNEL) < 0) {
            throw new IllegalStateException("fireChannelClosed cannot be invoked at lifecycle " + lifecycle);
        }
        // null cause means the channel terminated normally.
        closeCause = cause;
        lifecycle = State.CHANNEL_CLOSED;
        // TODO fire(JnlpConnectionStateListener::channelClosed);
        fire(new EventHandler() {
            @Override
            public void invoke(JnlpConnectionStateListener listener, JnlpConnectionState event) {
                listener.channelClosed(event);
            }
        });
    }
/**
 * Advances the connection state to indicate that the socket has been closed.
 */
/*package*/ void fireAfterDisconnect() {
    // Guarantee listeners always observe channelClosed before afterDisconnect.
    if (lifecycle == State.AFTER_CHANNEL) {
        fireChannelClosed(null);
    }
    lifecycle = State.DISCONNECTED;
    // TODO fire(JnlpConnectionStateListener::afterDisconnect);
    EventHandler afterDisconnect = new EventHandler() {
        @Override
        public void invoke(JnlpConnectionStateListener listener, JnlpConnectionState event) {
            listener.afterDisconnect(event);
        }
    };
    fire(afterDisconnect);
}
/**
 * Lambda interface used by {@link JnlpConnectionState#fire(EventHandler)}.
 * Implemented via anonymous classes at the call sites; the {@code TODO} markers
 * there suggest replacing each with a method reference once the codebase can
 * use them.
 */
private interface EventHandler {
    /**
     * Invokes the event on the listener.
     *
     * @param listener the listener.
     * @param event the event.
     */
    void invoke(JnlpConnectionStateListener listener, JnlpConnectionState event);
}
/**
 * The connection state.
 * <p>
 * NOTE: the declaration order of the constants is significant — the lifecycle
 * progression is checked with {@link Enum#compareTo} (see
 * {@code fireChannelClosed}), so constants must remain listed in lifecycle
 * order.
 */
private enum State {
    /**
     * The initial state when created. The {@link JnlpConnectionState} should never be visible to
     * {@link JnlpConnectionStateListener} in this state.
     */
    INITIALIZED,
    /**
     * The state before {@link JnlpConnectionState#fireAfterProperties(Map)}.
     */
    BEFORE_PROPERTIES,
    /**
     * The state once the {@link JnlpConnectionState#getProperties()} are available and before
     * {@link JnlpConnectionState#approve()} or
     * {@link JnlpConnectionState#reject(ConnectionRefusalException)}.
     */
    AFTER_PROPERTIES,
    /**
     * The state once {@link JnlpConnectionState#reject(ConnectionRefusalException)} has been called.
     * Terminal for the approval flow.
     */
    REJECTED,
    /**
     * The state after {@link JnlpConnectionState#approve()} has been called and before the {@link Channel}
     * has been built.
     */
    APPROVED,
    /**
     * The state before the channel is to be built.
     */
    BEFORE_CHANNEL,
    /**
     * The channel has been built and is probably still open.
     */
    AFTER_CHANNEL,
    /**
     * The channel has been closed but the socket may or may not have been closed yet.
     */
    CHANNEL_CLOSED,
    /**
     * The socket has been closed.
     */
    DISCONNECTED
}
/**
 * Marker base class for all stashed state data. Listeners use the stash to
 * carry private per-connection data between lifecycle callbacks.
 *
 * @see JnlpConnectionState#setStash(ListenerState)
 * @see JnlpConnectionState#getStash(Class)
 */
public interface ListenerState {
}
}
| |
package sagex.phoenix.fanart;
import java.io.File;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import sagex.api.MediaFileAPI;
import sagex.phoenix.configuration.proxy.GroupProxy;
import sagex.phoenix.metadata.ISageCustomMetadataRW;
import sagex.phoenix.metadata.MediaArtifactType;
import sagex.phoenix.metadata.MediaType;
import sagex.phoenix.metadata.MetadataConfiguration;
import sagex.phoenix.util.Utils;
/**
* Central Folder V2 Support as defined the <a
* href="http://forums.sagetv.com/forums/showthread.php?p=343902&postcount=35"
* >SageTV Thread</a><br>
* <p/>
* <pre>
* Base Folders
* CentralFolder\TV
* CentralFolder\Movies
* CentralFolder\Music
 * CentralFolder\Actors
* CentralFolder\Genres
*
* TV Folders
* TV\SeriesTitle\seriesInfo.data (Text file containing base series information Description, Genre, etc)
* TV\SeriesTitle\Backgrounds\(All Base Backgrounds Images)
* TV\SeriesTitle\Posters\(All Base Posters Images)
* TV\SeriesTitle\Banners\(All Base Banners Images)
* TV\SeriesTitle\Actors\(All Base Actor Images)
* TV\SeriesTitle\Season #\Backgrounds\(All Season Specific Backgrounds Images)
* TV\SeriesTitle\Season #\Posters\(All Season Specific Posters Images)
* TV\SeriesTitle\Season #\Banners\(All Season Specific Banners Images)
* TV\SeriesTitle\Season #\Actors\(All Season Specific Actor Images)
* TV\Genres\GenreName\(All GenreName Images)
*
* Movies
* Movies\MovieTitle\Backgrounds\(All Background Images)
* Movies\MovieTitle\Posters\(All Poster Images)
* Movies\MovieTitle\Banners\(All Banner Images)
* Movies\MovieTitle\Actors\(All Actor Images)
* Movies\Genres\Family\(All Family Genre Images)
*
* Actors
 * CentralFolder\Actors\Actor Name\ActorName.jpg
*
* Genres
 * CentralFolder\Genres\Genre Name\Posters\(All Genres images)
 * CentralFolder\Genres\Genre Name\Banners\(All Genres images)
 * CentralFolder\Genres\Genre Name\Backgrounds\(All Genres images)
*
* </pre>
* <p/>
* <br/>
* To enable Central Fanart Support, set the following properties
* <p/>
* <pre>
* phoenix/mediametadata/fanartEnabled=true
* phoenix/mediametadata/fanartCentralFolder=YOUR_CENTRAL_FOLDER
* </pre>
* <p/>
* <br/>
*
* @author seans
*/
@Deprecated
public class PhoenixFanartSupport implements IFanartSupport {
    private static final Logger log = Logger.getLogger(PhoenixFanartSupport.class);

    private MetadataConfiguration fanartConfig = null;

    // private LocalFanartSupport localFanart = new LocalFanartSupport();

    public PhoenixFanartSupport() {
        fanartConfig = GroupProxy.get(MetadataConfiguration.class);
        initializeFanartFolder(fanartConfig.getFanartCentralFolder());
    }

    /**
     * Ensures the central fanart folder is configured and exists on disk. When no
     * folder is configured, falls back to {@code <user.dir>/STVs/Phoenix/Fanart}
     * and writes that default back into the configuration.
     *
     * @param dir the configured central fanart folder; may be null or empty
     */
    private void initializeFanartFolder(String dir) {
        log.info("Phoenix Fanart initializing");
        if (StringUtils.isEmpty(dir)) {
            dir = System.getProperty("user.dir") + File.separator + "STVs" + File.separator + "Phoenix" + File.separator + "Fanart";
            fanartConfig.setCentralFanartFolder(dir);
        }
        File fanartFolder = new File(dir);
        if (!fanartFolder.exists()) {
            log.warn("Fanart folder does not exist, creating: " + fanartFolder);
            if (!fanartFolder.mkdirs()) {
                log.warn("Failed to create the fanart folder, this may be a permissions problem: Folder; " + fanartFolder);
            }
        }
        // Check writability unconditionally; previously this check only ran when the
        // folder had just been created, so a read-only pre-existing folder was never
        // reported.
        if (!fanartFolder.canWrite()) {
            log.warn("You don't have permissions to write to your fanart folder: " + fanartFolder);
        }
        log.info("Phoenix Fanart initialized");
    }

    /**
     * this is meant to return a subset of metadata properties that is useful
     * for determining fanart locations
     *
     * @param mediaType   media type of the object; only {@link MediaType#TV} yields properties
     * @param mediaObject the media object whose metadata is read
     * @return season/episode properties for TV items; {@code null} for every other type
     */
    private Map<String, String> getMetadata(MediaType mediaType, Object mediaObject) {
        if (mediaType == MediaType.TV) {
            Map<String, String> props = new HashMap<String, String>();
            for (String key : new String[]{FanartUtil.SEASON_NUMBER, FanartUtil.EPISODE_NUMBER}) {
                String v = SageFanartUtil.GetMediaFileMetadata(mediaObject, key);
                if (!SageFanartUtil.isEmpty(v)) {
                    props.put(key, v);
                }
            }
            return props;
        }
        return null;
    }

    public String GetFanartCentralFolder() {
        return fanartConfig.getFanartCentralFolder();
    }

    public boolean IsFanartEnabled() {
        return fanartConfig.isFanartEnabled();
    }

    public void SetFanartCentralFolder(String folder) {
        log.debug("Setting Central Fanart Folder: " + folder);
        fanartConfig.setCentralFanartFolder(folder);
        initializeFanartFolder(folder);
    }

    public void SetIsFanartEnabled(boolean value) {
        fanartConfig.setFanartEnabled(value);
    }

    /** @return true when the file is non-null and exists on disk */
    private boolean exists(File f) {
        return f != null && f.exists();
    }

    /** @return true when the array is non-null and contains at least one entry */
    private boolean exists(File[] f) {
        return f != null && f.length > 0;
    }

    /**
     * Resolves the user-chosen "default" artifact, if one has been stored in the
     * media file's metadata, relative to the central fanart folder.
     *
     * @param mediaObject  the media object whose metadata is consulted
     * @param artifactType poster, background, or banner
     * @return the default artifact file if set and present on disk; otherwise {@code null}
     */
    private File getDefaultArtifact(Object mediaObject, MediaArtifactType artifactType) {
        String def = null;
        if (artifactType == MediaArtifactType.POSTER) {
            def = MediaFileAPI.GetMediaFileMetadata(mediaObject, ISageCustomMetadataRW.FieldName.DEFAULT_POSTER);
        } else if (artifactType == MediaArtifactType.BACKGROUND) {
            def = MediaFileAPI.GetMediaFileMetadata(mediaObject, ISageCustomMetadataRW.FieldName.DEFAULT_BACKGROUND);
        } else if (artifactType == MediaArtifactType.BANNER) {
            def = MediaFileAPI.GetMediaFileMetadata(mediaObject, ISageCustomMetadataRW.FieldName.DEFAULT_BANNER);
        }

        if (!StringUtils.isEmpty(def)) {
            // The stored value is a path relative to the central folder.
            File f = new File(GetFanartCentralFolder(), def);
            if (f.exists() && f.isFile())
                return f;
        }
        return null;
    }

    /**
     * Finds a single fanart artifact for the given media object, preferring the
     * user-selected default artifact, then a title-based lookup, then (for music)
     * album-artist and person-artist fallbacks.
     */
    public File GetFanartArtifact(Object mediaObject, MediaType mediaType, String mediaTitle, MediaArtifactType artifactType,
                                  String artifactTitle, Map<String, String> metadata) {
        File file = null;

        SimpleMediaFile mf = SageFanartUtil.GetSimpleMediaFile(mediaObject);
        mediaType = Utils.returnNonNull(mediaType, mf.getMediaType());
        mediaTitle = Utils.returnNonNull(mediaTitle, mf.getTitle());
        metadata = Utils.returnNonNull(metadata, getMetadata(mediaType, mediaObject));

        String fanartFolder = GetFanartCentralFolder();

        if (!fanartConfig.getUseSeason()) {
            // if we are setup to not use season specific fanart, null out
            // metadata
            metadata = null;
        }

        // check for a default file
        file = getDefaultArtifact(mediaObject, artifactType);

        if (file == null) {
            file = GetFanartArtifactForTitle(mediaObject, mediaType, mediaTitle, artifactType, artifactTitle, metadata,
                    fanartFolder);
        }

        if (!exists(file) && mf.getMediaType() == MediaType.MUSIC) {
            // music fallback: try the album artist, then the person artist
            file = GetFanartArtifactForTitle(mediaObject, MediaType.MUSIC, SageFanartUtil.GetAlbumArtist(mediaObject),
                    artifactType, artifactTitle, metadata, GetFanartCentralFolder());
            if (!exists(file)) {
                file = GetFanartArtifactForTitle(mediaObject, MediaType.MUSIC, SageFanartUtil.GetAlbumPersonArtist(mediaObject),
                        artifactType, artifactTitle, metadata, GetFanartCentralFolder());
            }
        }

        // fallback to local fanart
        // if (!file.exists()) {
        // file = localFanart.GetFanartArtifact(mediaObject, mediaType,
        // mediaTitle, artifactType, artifactTitle, metadata);
        // }

        return file;
    }

    /**
     * Resolves the central fanart directory for the given media object, optionally
     * creating it on disk.
     *
     * @param create when true, missing directories are created (quietly)
     */
    public File GetFanartArtifactDir(Object mediaObject, MediaType mediaType, String mediaTitle, MediaArtifactType artifactType,
                                     String artifactTitle, Map<String, String> metadata, boolean create) {
        SimpleMediaFile mf = SageFanartUtil.GetSimpleMediaFile(mediaObject);
        mediaType = Utils.returnNonNull(mediaType, mf.getMediaType());
        mediaTitle = Utils.returnNonNull(mediaTitle, mf.getTitle());
        metadata = Utils.returnNonNull(metadata, getMetadata(mediaType, mediaObject));

        String fanartFolder = GetFanartCentralFolder();
        File f = FanartUtil.getCentralFanartDir(mediaType, mediaTitle, artifactType, artifactTitle, fanartFolder, metadata);
        if (create && f != null && !f.exists()) {
            sagex.phoenix.util.FileUtils.mkdirsQuietly(f);
            if (!f.exists()) {
                log.warn("Unable to create directory: " + f.getAbsolutePath() + "; Permission issue?");
            }
        }
        return f;
    }

    /**
     * Finds all fanart artifacts for the given media object, with the same music
     * fallbacks as {@link #GetFanartArtifact}.
     */
    public File[] GetFanartArtifacts(Object mediaObject, MediaType mediaType, String mediaTitle, MediaArtifactType artifactType,
                                     String artifactTitle, Map<String, String> metadata) {
        File files[] = null;

        SimpleMediaFile mf = SageFanartUtil.GetSimpleMediaFile(mediaObject);
        mediaType = Utils.returnNonNull(mediaType, mf.getMediaType());
        mediaTitle = Utils.returnNonNull(mediaTitle, mf.getTitle());
        metadata = Utils.returnNonNull(metadata, getMetadata(mediaType, mediaObject));

        String fanartFolder = GetFanartCentralFolder();

        if (!fanartConfig.getUseSeason()) {
            // if we are setup to not use season specific fanart, null out
            // metadata
            metadata = null;
        }

        files = GetFanartArtifactsForTitle(mediaObject, mediaType, mediaTitle, artifactType, artifactTitle, metadata, fanartFolder);

        if (!exists(files) && mf.getMediaType() == MediaType.MUSIC) {
            // music fallback by artist; pass MediaType.MUSIC explicitly for
            // consistency with GetFanartArtifact (mediaType may have been
            // overridden by the caller)
            files = GetFanartArtifactsForTitle(mediaObject, MediaType.MUSIC, SageFanartUtil.GetAlbumArtist(mediaObject),
                    artifactType, artifactTitle, metadata, fanartFolder);
            if (!exists(files)) {
                files = GetFanartArtifactsForTitle(mediaObject, MediaType.MUSIC, SageFanartUtil.GetAlbumPersonArtist(mediaObject),
                        artifactType, artifactTitle, metadata, fanartFolder);
            }
        }

        // if no central fanart, then check for local fanart
        // if (!exists(files)) {
        // File f = localFanart.GetFanartArtifact(mediaObject, mediaType,
        // mediaTitle, artifactType, artifactTitle, metadata);
        // if (f.exists()) {
        // files = new File[] {f};
        // }
        // }

        return files;
    }

    /**
     * Records the given file as the default artifact for its type by storing its
     * path, relative to the central fanart folder, in the media file's metadata.
     * Files outside the central folder are rejected.
     */
    public void SetFanartArtifact(Object mediaObject, File fanart, MediaType mediaType, String mediaTitle,
                                  MediaArtifactType artifactType, String artifactTitle, Map<String, String> metadata) {
        try {
            String central = (new File(GetFanartCentralFolder())).getCanonicalPath();
            String file = fanart.getCanonicalPath();
            if (!file.startsWith(central)) {
                throw new Exception("You can only set a fanart artifact relative to the fanart folder. Folder: " + central
                        + "; fanart: " + file);
            }

            // store the path relative to the central folder, without a leading separator
            String art = file.substring(central.length());
            if (art.startsWith(File.separator)) {
                art = StringUtils.strip(art, File.separator);
            }

            String key = null;
            if (artifactType == MediaArtifactType.POSTER) {
                key = ISageCustomMetadataRW.FieldName.DEFAULT_POSTER;
            } else if (artifactType == MediaArtifactType.BACKGROUND) {
                key = ISageCustomMetadataRW.FieldName.DEFAULT_BACKGROUND;
            } else if (artifactType == MediaArtifactType.BANNER) {
                key = ISageCustomMetadataRW.FieldName.DEFAULT_BANNER;
            }

            if (key == null)
                throw new Exception("Invalid Artifact Type: " + artifactType + "; Can't set default artifact.");

            MediaFileAPI.SetMediaFileMetadata(mediaObject, key, art);
        } catch (Exception e) {
            log.warn("Failed to set the default fanart artifact!", e);
        }
    }

    /** @return the season number from the metadata map, or {@code null} if absent */
    private static final String getSeason(Map<String, String> metadata) {
        if (metadata == null) {
            return null;
        }
        return metadata.get(FanartUtil.SEASON_NUMBER);
    }

    /**
     * Title-based lookup for a single artifact. For TV, retries without the season
     * metadata when a season-specific lookup fails; as a last resort returns the
     * first of all matching artifacts.
     */
    public File GetFanartArtifactForTitle(Object mediaObject, MediaType mediaType, String mediaTitle,
                                          MediaArtifactType artifactType, String artifactTitle, Map<String, String> metadata, String centralFolder) {
        File art = null;
        art = FanartUtil.getCentralFanartArtifact(mediaType, mediaTitle, artifactType, artifactTitle, centralFolder, metadata);
        if (art == null || !art.exists()) {
            if (mediaType == MediaType.TV && metadata != null && metadata.get(FanartUtil.SEASON_NUMBER) != null) {
                // do a search without the season metadata
                art = FanartUtil.getCentralFanartArtifact(mediaType, mediaTitle, artifactType, artifactTitle, centralFolder, null);
            }
        }

        // if no matches, then find the first one
        if (art == null || !art.exists()) {
            File all[] = GetFanartArtifactsForTitle(mediaObject, mediaType, mediaTitle, artifactType, artifactTitle, metadata,
                    centralFolder);
            if (!SageFanartUtil.isEmpty(all)) {
                art = all[0];
            }
        }

        if (log.isDebugEnabled()) {
            log.debug("GetFanartArtifactForTitle: MediaType: " + mediaType + "; MediaTitle: " + mediaTitle + "; ArtifactType: "
                    + artifactType + "; ArtifactTitle: " + artifactTitle + "; Artifact: " + art + "; Season: "
                    + getSeason(metadata) + "; MediaFile: " + mediaObject);
        }
        return art;
    }

    /**
     * Title-based lookup for all matching artifacts. For TV, retries without the
     * season metadata when a season-specific lookup yields nothing.
     */
    public File[] GetFanartArtifactsForTitle(Object mediaObject, MediaType mediaType, String mediaTitle,
                                             MediaArtifactType artifactType, String artifactTitle, Map<String, String> metadata, String centralFolder) {
        File files[] = null;
        files = FanartUtil.getCentalFanartArtifacts(mediaType, mediaTitle, artifactType, artifactTitle, centralFolder, metadata);
        if (files == null || files.length == 0) {
            if (mediaType == MediaType.TV && metadata != null && metadata.get(FanartUtil.SEASON_NUMBER) != null) {
                // do a search without the season metadata
                files = FanartUtil
                        .getCentalFanartArtifacts(mediaType, mediaTitle, artifactType, artifactTitle, centralFolder, null);
            }
        }

        if (log.isDebugEnabled()) {
            log.debug("GetFanartArtifactsForTitle: MediaType: " + mediaType + "; MediaTitle: " + mediaTitle + "; ArtifactType: "
                    + artifactType + "; ArtifactTitle: " + artifactTitle + "; Artifact Count: "
                    + (files == null ? 0 : files.length) + "; MediaFile: " + mediaObject);
        }
        return files;
    }
}
| |
/*
* Copyright (C) 2013 Brett Wooldridge
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zaxxer.nuprocess.internal;
import com.sun.jna.Memory;
import com.sun.jna.Native;
import com.zaxxer.nuprocess.NuProcess;
import com.zaxxer.nuprocess.NuProcessHandler;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;
import java.util.logging.Logger;
import static com.zaxxer.nuprocess.internal.Constants.NUMBER_OF_THREADS;
import static java.util.concurrent.locks.LockSupport.parkNanos;
@SuppressWarnings("WeakerAccess")
public abstract class BasePosixProcess implements NuProcess
{
    protected static final Logger LOGGER = Logger.getLogger(BasePosixProcess.class.getCanonicalName());

    private static final boolean IS_SOFTEXIT_DETECTION;

    // Sentinel queued onto pendingWrites to mean "close stdin once all prior writes drain".
    private static final ByteBuffer STDIN_CLOSED_PENDING_WRITE_TOMBSTONE = ByteBuffer.allocate(1);

    protected static final IEventProcessor<? extends BasePosixProcess>[] processors;
    protected static int processorRoundRobin;

    @SuppressWarnings("unused")
    private int exitcode; // set from native code in JDK 7

    protected IEventProcessor<? super BasePosixProcess> myProcessor;
    protected volatile NuProcessHandler processHandler;

    protected volatile int pid;
    protected volatile boolean isRunning;
    public final AtomicBoolean cleanlyExitedBeforeProcess;
    protected AtomicInteger exitCode;
    protected CountDownLatch exitPending;

    protected AtomicBoolean userWantsWrite;

    // ******* Input/Output Buffers
    private Memory outBufferMemory;
    private Memory errBufferMemory;
    private Memory inBufferMemory;
    protected ByteBuffer outBuffer;
    protected ByteBuffer errBuffer;
    protected ByteBuffer inBuffer;

    // ******* Stdin/Stdout/Stderr pipe handles
    protected ReferenceCountedFileDescriptor stdin;
    protected ReferenceCountedFileDescriptor stdout;
    protected ReferenceCountedFileDescriptor stderr;
    protected volatile int stdinWidow;
    protected volatile int stdoutWidow;
    protected volatile int stderrWidow;

    protected AtomicBoolean stdinClosing;
    protected boolean outClosed;
    protected boolean errClosed;

    private ConcurrentLinkedQueue<ByteBuffer> pendingWrites;

    static {
        IS_SOFTEXIT_DETECTION = Boolean.parseBoolean(System.getProperty("com.zaxxer.nuprocess.softExitDetection", "true"));

        processors = new IEventProcessor<?>[NUMBER_OF_THREADS];

        if (Boolean.parseBoolean(System.getProperty("com.zaxxer.nuprocess.enableShutdownHook", "true"))) {
            Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
                @Override
                public void run()
                {
                    for (IEventProcessor<? extends BasePosixProcess> processor : processors) {
                        if (processor != null) {
                            processor.shutdown();
                        }
                    }
                }
            }));
        }
    }

    protected BasePosixProcess(NuProcessHandler processListener)
    {
        this.processHandler = processListener;
        this.userWantsWrite = new AtomicBoolean();
        this.cleanlyExitedBeforeProcess = new AtomicBoolean();
        this.exitCode = new AtomicInteger();
        this.exitPending = new CountDownLatch(1);
        this.stdin = new ReferenceCountedFileDescriptor(-1);
        this.stdout = new ReferenceCountedFileDescriptor(-1);
        this.stderr = new ReferenceCountedFileDescriptor(-1);
        this.stdinClosing = new AtomicBoolean();
        this.outClosed = true;
        this.errClosed = true;
    }

    // ************************************************************************
    //                        NuProcess interface methods
    // ************************************************************************

    public abstract NuProcess start(List<String> command, String[] environment, Path cwd);

    /**
     * Runs the process synchronously.
     *
     * Pumping is done on the calling thread, and this method will not return until the process has exited.
     *
     * @since 1.3
     */
    public abstract void run(List<String> command, String[] environment, Path cwd);

    /** {@inheritDoc} */
    @Override
    public boolean isRunning()
    {
        return isRunning;
    }

    /** {@inheritDoc} */
    @Override
    public int waitFor(long timeout, TimeUnit unit) throws InterruptedException
    {
        if (timeout == 0) {
            exitPending.await();
        }
        else if (!exitPending.await(timeout, unit)) {
            return Integer.MIN_VALUE;
        }

        return exitCode.get();
    }

    /** {@inheritDoc} */
    @Override
    public void destroy(boolean force)
    {
        if (isRunning) {
            int result = LibC.kill(pid, force ? LibC.SIGKILL : LibC.SIGTERM);
            if (result != 0) {
                int errno = Native.getLastError();
                if (errno == LibC.ESRCH) {
                    // The process already exited; not an error from the caller's perspective.
                    LOGGER.log(Level.FINE, "{0}: The process exited before it could be {1}",
                            new Object[]{pid, force ? "killed" : "terminated"});
                }
                else {
                    throw new RuntimeException("Sending signal failed, return code: " + result + ", last error: " + errno);
                }
            }
        }
    }

    /** {@inheritDoc} */
    @Override
    public void wantWrite()
    {
        try {
            int fd = stdin.acquire();
            if (fd != -1) {
                userWantsWrite.set(true);
                myProcessor.queueWrite(this);
            }
            else {
                throw new IllegalStateException("closeStdin() method has already been called.");
            }
        } finally {
            stdin.release();
        }
    }

    /** {@inheritDoc} */
    @Override
    public void closeStdin(boolean force)
    {
        if (force) {
            try {
                int fd = stdin.acquire();
                if (fd != -1) {
                    if (myProcessor != null) {
                        myProcessor.closeStdin(this);
                    }
                    stdin.close();
                }
            } finally {
                stdin.release();
            }
        } else {
            if (stdinClosing.compareAndSet(false, true)) {
                // Soft close: queue the tombstone so stdin closes after pending writes drain.
                pendingWrites.add(STDIN_CLOSED_PENDING_WRITE_TOMBSTONE);
                myProcessor.queueWrite(this);
            } else {
                throw new IllegalStateException("closeStdin() method has already been called.");
            }
        }
    }

    /** {@inheritDoc} */
    @Override
    public void writeStdin(ByteBuffer buffer)
    {
        try {
            int fd = stdin.acquire();
            boolean closing = stdinClosing.get();
            if (fd != -1 && !closing) {
                pendingWrites.add(buffer);
                myProcessor.queueWrite(this);
            }
            else {
                throw new IllegalStateException("closeStdin() method has already been called.");
            }
        } finally {
            stdin.release();
        }
    }

    /** {@inheritDoc} */
    @Override
    public boolean hasPendingWrites()
    {
        return !pendingWrites.isEmpty();
    }

    /** {@inheritDoc} */
    @Override
    public void setProcessHandler(NuProcessHandler processHandler)
    {
        this.processHandler = processHandler;
    }

    // ************************************************************************
    //                             Public methods
    // ************************************************************************

    public int getPid()
    {
        return pid;
    }

    public int getPID()
    {
        return pid;
    }

    public ReferenceCountedFileDescriptor getStdin()
    {
        return stdin;
    }

    public ReferenceCountedFileDescriptor getStdout()
    {
        return stdout;
    }

    public ReferenceCountedFileDescriptor getStderr()
    {
        return stderr;
    }

    /** @return true when soft-exit detection is enabled and both stdout and stderr have closed */
    public boolean isSoftExit()
    {
        return (IS_SOFTEXIT_DETECTION && outClosed && errClosed);
    }

    /**
     * Called when the process exits: flushes remaining output to the handler,
     * invokes {@code onExit}, and releases the native buffers.
     *
     * @param statusCode the process exit status
     */
    public void onExit(int statusCode)
    {
        if (exitPending.getCount() == 0) {
            // TODO: handle SIGCHLD
            return;
        }

        try {
            closeStdin(true);
            stdout.close();
            stderr.close();

            isRunning = false;
            exitCode.set(statusCode);

            if (outBuffer != null && !outClosed) {
                outBuffer.flip();
                processHandler.onStdout(outBuffer, true);
            }

            if (errBuffer != null && !errClosed) {
                errBuffer.flip();
                processHandler.onStderr(errBuffer, true);
            }

            if (statusCode != Integer.MAX_VALUE - 1) {
                processHandler.onExit(statusCode);
            }
        }
        catch (Exception e) {
            // Don't let an exception thrown from the user's handler interrupt us
            LOGGER.log(Level.WARNING, "Exception thrown from handler", e);
        }
        finally {
            exitPending.countDown();
            // Once the last reference to the buffer is gone, Java will finalize the buffer
            // and release the native memory we allocated in initializeBuffers().
            outBufferMemory = null;
            errBufferMemory = null;
            inBufferMemory = null;
            outBuffer = null;
            errBuffer = null;
            inBuffer = null;
            processHandler = null;
            Memory.purge();
        }
    }

    /**
     * Pumps available bytes from the child's stdout into the handler.
     *
     * @param availability number of bytes available; negative signals EOF
     * @param fd           the stdout file descriptor to read from
     */
    public void readStdout(int availability, int fd)
    {
        if (outClosed || availability == 0) {
            return;
        }

        try {
            if (availability < 0) {
                outClosed = true;
                outBuffer.flip();
                processHandler.onStdout(outBuffer, true);
                return;
            }

            int read = LibC.read(fd, outBuffer, Math.min(availability, outBuffer.remaining()));
            if (read == -1) {
                outClosed = true;
                throw new RuntimeException("Unexpected eof");
                // EOF?
            }
            outBuffer.limit(outBuffer.position() + read);
            outBuffer.position(0);
            processHandler.onStdout(outBuffer, false);
            // Preserve any bytes the handler did not consume.
            outBuffer.compact();
        }
        catch (Exception e) {
            // Don't let an exception thrown from the user's handler interrupt us
            LOGGER.log(Level.WARNING, "Exception thrown from handler", e);
        }
        if (!outBuffer.hasRemaining()) {
            // The caller's onStdout() callback must set the buffer's position
            // to indicate how many bytes were consumed, or else it will
            // eventually run out of capacity.
            throw new RuntimeException("stdout buffer has no bytes remaining");
        }
    }

    /**
     * Pumps available bytes from the child's stderr into the handler.
     *
     * @param availability number of bytes available; negative signals EOF
     * @param fd           the stderr file descriptor to read from
     */
    public void readStderr(int availability, int fd)
    {
        if (errClosed || availability == 0) {
            return;
        }

        try {
            if (availability < 0) {
                errClosed = true;
                errBuffer.flip();
                processHandler.onStderr(errBuffer, true);
                return;
            }

            int read = LibC.read(fd, errBuffer, Math.min(availability, errBuffer.remaining()));
            if (read == -1) {
                // EOF?
                errClosed = true;
                throw new RuntimeException("Unexpected eof");
            }
            errBuffer.limit(errBuffer.position() + read);
            errBuffer.position(0);
            processHandler.onStderr(errBuffer, false);
            // Preserve any bytes the handler did not consume.
            errBuffer.compact();
        }
        catch (Exception e) {
            // Don't let an exception thrown from the user's handler interrupt us
            LOGGER.log(Level.WARNING, "Exception thrown from handler", e);
        }
        if (!errBuffer.hasRemaining()) {
            // The caller's onStderr() callback must set the buffer's position
            // to indicate how many bytes were consumed, or else it will
            // eventually run out of capacity.
            throw new RuntimeException("stderr buffer has no bytes remaining");
        }
    }

    /**
     * Drains pending writes (and, on demand, the handler's onStdinReady data) to
     * the child's stdin.
     *
     * @param availability number of bytes writable without blocking
     * @param fd           the stdin file descriptor, or -1 if already closed
     * @return true if this method should be called again when stdin is writable
     */
    public boolean writeStdin(int availability, int fd)
    {
        if (availability <= 0 || fd == -1) {
            return false;
        }

        if (inBuffer.hasRemaining()) {
            int wrote;
            do {
                wrote = LibC.write(fd, inBuffer, Math.min(availability, inBuffer.remaining()));
                if (wrote < 0) {
                    int errno = Native.getLastError();
                    // NOTE: EAGAIN is 11 on Linux and 35 on macOS/BSD (the
                    // original comments had these swapped).
                    if (errno == 11 /*EAGAIN on Linux*/ || errno == 35 /*EAGAIN on macOS/BSD*/) {
                        availability /= 4;
                        continue;
                    }

                    // EOF?
                    stdin.close();
                    return false;
                }
            }
            while (wrote < 0);

            availability -= wrote;
            inBuffer.position(inBuffer.position() + wrote);
            if (inBuffer.hasRemaining()) {
                return true;
            }
        }

        if (!pendingWrites.isEmpty()) {
            inBuffer.clear();
            // copy the next buffer into our direct buffer (inBuffer)
            ByteBuffer byteBuffer = pendingWrites.peek();
            if (byteBuffer == STDIN_CLOSED_PENDING_WRITE_TOMBSTONE) {
                // We've written everything the user requested, and the user wants to close stdin now.
                closeStdin(true);
                userWantsWrite.set(false);
                pendingWrites.clear();
                return false;
            } else if (byteBuffer != null && byteBuffer.remaining() > BUFFER_CAPACITY) {
                // Copy at most one buffer's worth; the rest stays queued for the next pass.
                ByteBuffer slice = byteBuffer.slice();
                slice.limit(BUFFER_CAPACITY);
                inBuffer.put(slice);
                byteBuffer.position(byteBuffer.position() + BUFFER_CAPACITY);
            }
            else if (byteBuffer != null) {
                inBuffer.put(byteBuffer);
                pendingWrites.poll();
            }

            inBuffer.flip();
            // Recurse
            if (inBuffer.hasRemaining()) {
                if (availability <= 0) {
                    // We can't write now, so we want to be called back again once there is more availability.
                    return true;
                }
                return writeStdin(availability, fd);
            }
        }

        // Check whether the user has requested a write window.  If so, this will clear the request and set
        // userWantsWrite to a known-false state.  Clearing the flag here ensures, if wantWrite() is called
        // while we're in onStdinReady or, racily, after onStdinReady returns but before the result can be
        // used to update userWantsWrite, we don't "lose" the write request
        if (!userWantsWrite.compareAndSet(true, false)) {
            return false;
        }

        try {
            inBuffer.clear();
            if (processHandler.onStdinReady(inBuffer)) {
                // If onStdinReady returns true, re-set userWantsWrite.  If it returns false, we already set
                // userWantsWrite to false before we made the callback so there's nothing to do
                userWantsWrite.set(true);
            }
            if (inBuffer.hasRemaining() && availability > 0) {
                // Recurse
                return writeStdin(availability, fd);
            }

            return true;
        }
        catch (Exception e) {
            LOGGER.log(Level.SEVERE, "Exception thrown handling writes to stdin " + processHandler, e);
            // Don't let an exception thrown from the user's handler interrupt us
            return false;
        }
    }

    // ************************************************************************
    //                              Private methods
    // ************************************************************************

    protected void afterStart() {
        final long testSleep = Integer.getInteger("nuprocess.test.afterStartSleep", 0);
        if (testSleep > 0) {
            parkNanos(testSleep);
        }

        isRunning = true;
    }

    /**
     * Allocates the native stdout/stderr/stdin buffers. Must be called before
     * pumping begins; the buffers are released in {@link #onExit(int)}.
     */
    protected void initializeBuffers()
    {
        outClosed = false;
        errClosed = false;

        pendingWrites = new ConcurrentLinkedQueue<>();

        outBufferMemory = new Memory(BUFFER_CAPACITY);
        outBuffer = outBufferMemory.getByteBuffer(0, outBufferMemory.size()).order(ByteOrder.nativeOrder());

        // Each buffer must be sized by its own Memory allocation; the original
        // code reused outBufferMemory.size() for all three (harmless only while
        // all capacities are identical).
        errBufferMemory = new Memory(BUFFER_CAPACITY);
        errBuffer = errBufferMemory.getByteBuffer(0, errBufferMemory.size()).order(ByteOrder.nativeOrder());

        inBufferMemory = new Memory(BUFFER_CAPACITY);
        inBuffer = inBufferMemory.getByteBuffer(0, inBufferMemory.size()).order(ByteOrder.nativeOrder());

        // Ensure stdin initially has 0 bytes pending write. We'll
        // update this before invoking onStdinReady.
        inBuffer.limit(0);
    }

    /**
     * Registers this process with the next event processor (round-robin), starting
     * the processor's thread on first use.
     */
    @SuppressWarnings("unchecked")
    protected void registerProcess()
    {
        int mySlot;
        synchronized (processors) {
            mySlot = processorRoundRobin;
            processorRoundRobin = (processorRoundRobin + 1) % processors.length;
        }

        myProcessor = (IEventProcessor<? super BasePosixProcess>) processors[mySlot];
        myProcessor.registerProcess(this);

        if (myProcessor.checkAndSetRunning()) {
            CyclicBarrier spawnBarrier = myProcessor.getSpawnBarrier();

            Thread t = new Thread(myProcessor, "ProcessQueue" + mySlot);
            t.setDaemon(true);
            t.start();

            try {
                // Wait until the processor thread is actually up before returning.
                spawnBarrier.await();
            }
            catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    }

    protected void callPreStart()
    {
        try {
            processHandler.onPreStart(this);
        }
        catch (Exception e) {
            // Don't let an exception thrown from the user's handler interrupt us
            LOGGER.log(Level.WARNING, "Exception thrown from handler", e);
        }
    }

    protected void callStart()
    {
        try {
            processHandler.onStart(this);
        }
        catch (Exception e) {
            // Don't let an exception thrown from the user's handler interrupt us
            LOGGER.log(Level.WARNING, "Exception thrown from handler", e);
        }
    }

    /**
     * Creates the stdin/stdout/stderr pipe pairs, makes the parent-side ends
     * non-blocking, and stashes the child-side ("widow") descriptors.
     *
     * @return the parent-side descriptors {stdin-write, stdout-read, stderr-read}
     */
    protected int[] createPipes()
    {
        int[] in = new int[2];
        int[] out = new int[2];
        int[] err = new int[2];

        try {
            int rc = LibC.pipe(in);
            checkReturnCode(rc, "Create stdin pipe() failed");
            rc = LibC.pipe(out);
            checkReturnCode(rc, "Create stdout pipe() failed");
            rc = LibC.pipe(err);
            checkReturnCode(rc, "Create stderr pipe() failed");

            setNonBlocking(in[1], out[0], err[0]);

            stdin = new ReferenceCountedFileDescriptor(in[1]);
            stdout = new ReferenceCountedFileDescriptor(out[0]);
            stderr = new ReferenceCountedFileDescriptor(err[0]);

            stdinWidow = in[0];
            stdoutWidow = out[1];
            stderrWidow = err[1];

            return new int[] {in[1], out[0], err[0]};
        }
        catch (RuntimeException e) {
            LOGGER.log(Level.SEVERE, "Error creating pipes", e);
            initFailureCleanup(in, out, err);
            throw e;
        }
    }

    /**
     * Closes every descriptor allocated by a partially-successful
     * {@link #createPipes()} call.
     */
    protected void initFailureCleanup(int[] in, int[] out, int[] err)
    {
        Set<Integer> unique = new HashSet<>();
        // Add stdin pipe descriptors
        unique.add(in[0]);
        unique.add(in[1]);
        // Add stdout pipe descriptors
        unique.add(out[0]);
        unique.add(out[1]);
        // Add stderr pipe descriptors
        unique.add(err[0]);
        unique.add(err[1]);
        // Remove 0, in case any of the above desriptors weren't allocated
        unique.remove(0);
        for (int fd : unique) {
            LibC.close(fd);
        }
    }

    /**
     * Throws a {@link RuntimeException} carrying errno context when a native call
     * returned non-zero.
     */
    protected static void checkReturnCode(int rc, String failureMessage)
    {
        if (rc != 0) {
            throw new RuntimeException(failureMessage + ", return code: " + rc + ", last error: " + Native.getLastError());
        }
    }

    /** Puts the three parent-side descriptors into O_NONBLOCK mode via fcntl. */
    private void setNonBlocking(int in, int out, int err)
    {
        int rc = LibC.fcntl(in, LibC.F_SETFL, LibC.fcntl(in, LibC.F_GETFL) | LibC.O_NONBLOCK);
        checkReturnCode(rc, "fnctl on stdin handle failed");
        rc = LibC.fcntl(out, LibC.F_SETFL, LibC.fcntl(out, LibC.F_GETFL) | LibC.O_NONBLOCK);
        checkReturnCode(rc, "fnctl on stdout handle failed");
        rc = LibC.fcntl(err, LibC.F_SETFL, LibC.fcntl(err, LibC.F_GETFL) | LibC.O_NONBLOCK);
        checkReturnCode(rc, "fnctl on stderr handle failed");
    }
}
| |
package fr.free.nrw.commons.contributions;
import static android.view.View.GONE;
import static android.view.View.VISIBLE;
import static fr.free.nrw.commons.di.NetworkingModule.NAMED_LANGUAGE_WIKI_PEDIA_WIKI_SITE;
import android.content.Context;
import android.content.res.Configuration;
import android.net.Uri;
import android.os.Bundle;
import android.os.Parcelable;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.widget.AppCompatTextView;
import androidx.fragment.app.FragmentManager;
import androidx.recyclerview.widget.GridLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import androidx.recyclerview.widget.RecyclerView.AdapterDataObserver;
import androidx.recyclerview.widget.RecyclerView.ItemAnimator;
import androidx.recyclerview.widget.RecyclerView.OnItemTouchListener;
import androidx.recyclerview.widget.SimpleItemAnimator;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import com.google.android.material.floatingactionbutton.FloatingActionButton;
import fr.free.nrw.commons.Media;
import fr.free.nrw.commons.R;
import fr.free.nrw.commons.Utils;
import fr.free.nrw.commons.auth.SessionManager;
import fr.free.nrw.commons.di.CommonsDaggerSupportFragment;
import fr.free.nrw.commons.utils.DialogUtil;
import fr.free.nrw.commons.media.MediaClient;
import fr.free.nrw.commons.utils.SystemThemeUtils;
import fr.free.nrw.commons.utils.ViewUtil;
import java.util.Locale;
import java.util.Objects;
import javax.inject.Inject;
import javax.inject.Named;
import org.apache.commons.lang3.StringUtils;
import org.wikipedia.dataclient.WikiSite;
import fr.free.nrw.commons.profile.ProfileActivity;
/**
 * Fragment that displays the list of media contributions for a given user
 * (the logged-in user by default), with floating action buttons for starting
 * new uploads when viewing one's own contributions.
 */
public class ContributionsListFragment extends CommonsDaggerSupportFragment implements
    ContributionsListContract.View, ContributionsListAdapter.Callback,
    WikipediaInstructionsDialogFragment.Callback {

    /** Bundle key used to persist the RecyclerView layout state across recreation. */
    private static final String RV_STATE = "rv_scroll_state";

    @BindView(R.id.contributionsList)
    RecyclerView rvContributionsList;
    @BindView(R.id.loadingContributionsProgressBar)
    ProgressBar progressBar;
    @BindView(R.id.fab_plus)
    FloatingActionButton fabPlus;
    @BindView(R.id.fab_camera)
    FloatingActionButton fabCamera;
    @BindView(R.id.fab_gallery)
    FloatingActionButton fabGallery;
    @BindView(R.id.noContributionsYet)
    TextView noContributionsYet;
    @BindView(R.id.fab_layout)
    LinearLayout fab_layout;
    @BindView(R.id.fab_custom_gallery)
    FloatingActionButton fabCustomGallery;
    @Inject
    SystemThemeUtils systemThemeUtils;
    @BindView(R.id.tv_contributions_of_user)
    AppCompatTextView tvContributionsOfUser;
    @Inject
    ContributionController controller;
    @Inject
    MediaClient mediaClient;
    @Named(NAMED_LANGUAGE_WIKI_PEDIA_WIKI_SITE)
    @Inject
    WikiSite languageWikipediaSite;
    @Inject
    ContributionsListPresenter contributionsListPresenter;
    @Inject
    SessionManager sessionManager;

    private Animation fab_close;
    private Animation fab_open;
    private Animation rotate_forward;
    private Animation rotate_backward;
    // Whether the FAB menu is currently expanded.
    private boolean isFabOpen;
    private ContributionsListAdapter adapter;
    // Parent fragment acting as callback; nulled on detach to avoid leaks,
    // so every use must be null-guarded.
    @Nullable private Callback callback;

    private final int SPAN_COUNT_LANDSCAPE = 3;
    private final int SPAN_COUNT_PORTRAIT = 1;

    // Size of the currently displayed contribution list; see getTotalMediaCount().
    private int contributionsSize;
    // User whose contributions are displayed; falls back to the session user.
    String userName;

    @Override
    public void onCreate(@Nullable @org.jetbrains.annotations.Nullable final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        //Now that we are allowing this fragment to be started for
        // any userName- we expect it to be passed as an argument
        if (getArguments() != null) {
            userName = getArguments().getString(ProfileActivity.KEY_USERNAME);
        }
        if (StringUtils.isEmpty(userName)) {
            userName = sessionManager.getUserName();
        }
    }

    @Override
    public View onCreateView(
        final LayoutInflater inflater, @Nullable final ViewGroup container,
        @Nullable final Bundle savedInstanceState) {
        final View view = inflater.inflate(R.layout.fragment_contributions_list, container, false);
        ButterKnife.bind(this, view);
        contributionsListPresenter.onAttachView(this);
        // Upload FABs are only offered when viewing one's own contributions;
        // for other users a header identifying them is shown instead.
        if (Objects.equals(sessionManager.getUserName(), userName)) {
            tvContributionsOfUser.setVisibility(GONE);
            fab_layout.setVisibility(VISIBLE);
        } else {
            tvContributionsOfUser.setVisibility(VISIBLE);
            tvContributionsOfUser.setText(getString(R.string.contributions_of_user, userName));
            fab_layout.setVisibility(GONE);
        }
        initAdapter();
        return view;
    }

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        if (getParentFragment() != null && getParentFragment() instanceof ContributionsFragment) {
            callback = ((ContributionsFragment) getParentFragment());
        }
    }

    @Override
    public void onDetach() {
        super.onDetach();
        callback = null;//To avoid possible memory leak
    }

    private void initAdapter() {
        adapter = new ContributionsListAdapter(this, mediaClient);
    }

    @Override
    public void onViewCreated(final View view, @Nullable final Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        initRecyclerView();
        initializeAnimations();
        setListeners();
    }

    /** Configures layout manager, adapter, data observation and touch handling. */
    private void initRecyclerView() {
        final GridLayoutManager layoutManager = new GridLayoutManager(getContext(),
            getSpanCount(getResources().getConfiguration().orientation));
        rvContributionsList.setLayoutManager(layoutManager);
        //Setting flicker animation of recycler view to false.
        final ItemAnimator animator = rvContributionsList.getItemAnimator();
        if (animator instanceof SimpleItemAnimator) {
            ((SimpleItemAnimator) animator).setSupportsChangeAnimations(false);
        }
        contributionsListPresenter.setup(userName,
            Objects.equals(sessionManager.getUserName(), userName));
        contributionsListPresenter.contributionList.observe(getViewLifecycleOwner(), list -> {
            contributionsSize = list.size();
            adapter.submitList(list);
            if (callback != null) {
                callback.notifyDataSetChanged();
            }
        });
        rvContributionsList.setAdapter(adapter);
        adapter.registerAdapterDataObserver(new AdapterDataObserver() {
            @Override
            public void onItemRangeInserted(int positionStart, int itemCount) {
                super.onItemRangeInserted(positionStart, itemCount);
                if (itemCount > 0 && positionStart == 0) {
                    if (adapter.getContributionForPosition(positionStart) != null) {
                        rvContributionsList
                            .scrollToPosition(0);//Newly upload items are always added to the top
                    }
                }
            }

            /**
             * Called whenever items in the list have changed
             * Calls viewPagerNotifyDataSetChanged() that will notify the viewpager
             */
            @Override
            public void onItemRangeChanged(final int positionStart, final int itemCount) {
                super.onItemRangeChanged(positionStart, itemCount);
                if (callback != null) {
                    callback.viewPagerNotifyDataSetChanged();
                }
            }
        });
        //Fab close on touch outside (Scrolling or taping on item triggers this action).
        rvContributionsList.addOnItemTouchListener(new OnItemTouchListener() {
            /**
             * Silently observe and/or take over touch events sent to the RecyclerView before
             * they are handled by either the RecyclerView itself or its child views.
             */
            @Override
            public boolean onInterceptTouchEvent(@NonNull RecyclerView rv, @NonNull MotionEvent e) {
                if (e.getAction() == MotionEvent.ACTION_DOWN) {
                    if (isFabOpen) {
                        animateFAB(isFabOpen);
                    }
                }
                return false;
            }

            /**
             * Process a touch event as part of a gesture that was claimed by returning true
             * from a previous call to {@link #onInterceptTouchEvent}.
             *
             * @param rv
             * @param e MotionEvent describing the touch event. All coordinates are in the
             *          RecyclerView's coordinate system.
             */
            @Override
            public void onTouchEvent(@NonNull RecyclerView rv, @NonNull MotionEvent e) {
                //required abstract method DO NOT DELETE
            }

            /**
             * Called when a child of RecyclerView does not want RecyclerView and its ancestors
             * to intercept touch events with {@link ViewGroup#onInterceptTouchEvent(MotionEvent)}.
             *
             * @param disallowIntercept True if the child does not want the parent to intercept
             *                          touch events.
             */
            @Override
            public void onRequestDisallowInterceptTouchEvent(boolean disallowIntercept) {
                //required abstract method DO NOT DELETE
            }
        });
    }

    /** Returns the grid span count appropriate for the given screen orientation. */
    private int getSpanCount(final int orientation) {
        return orientation == Configuration.ORIENTATION_LANDSCAPE ?
            SPAN_COUNT_LANDSCAPE : SPAN_COUNT_PORTRAIT;
    }

    @Override
    public void onConfigurationChanged(final Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        // check orientation
        fab_layout.setOrientation(newConfig.orientation == Configuration.ORIENTATION_LANDSCAPE ?
            LinearLayout.HORIZONTAL : LinearLayout.VERTICAL);
        rvContributionsList
            .setLayoutManager(
                new GridLayoutManager(getContext(), getSpanCount(newConfig.orientation)));
    }

    private void initializeAnimations() {
        fab_open = AnimationUtils.loadAnimation(getActivity(), R.anim.fab_open);
        fab_close = AnimationUtils.loadAnimation(getActivity(), R.anim.fab_close);
        rotate_forward = AnimationUtils.loadAnimation(getActivity(), R.anim.rotate_forward);
        rotate_backward = AnimationUtils.loadAnimation(getActivity(), R.anim.rotate_backward);
    }

    private void setListeners() {
        fabPlus.setOnClickListener(view -> animateFAB(isFabOpen));
        fabCamera.setOnClickListener(view -> {
            controller.initiateCameraPick(getActivity());
            animateFAB(isFabOpen);
        });
        fabGallery.setOnClickListener(view -> {
            controller.initiateGalleryPick(getActivity(), true);
            animateFAB(isFabOpen);
        });
    }

    /**
     * Launch Custom Selector.
     */
    @OnClick(R.id.fab_custom_gallery)
    void launchCustomSelector(){
        controller.initiateCustomGalleryPickWithPermission(getActivity());
        animateFAB(isFabOpen);
    }

    public void scrollToTop() {
        rvContributionsList.smoothScrollToPosition(0);
    }

    /**
     * Toggles the FAB menu between its expanded and collapsed states.
     *
     * Fix: the original assigned {@code this.isFabOpen = !isFabOpen} twice
     * (once unconditionally and once more inside the {@code isShown()} branch);
     * the second, redundant assignment has been removed.
     *
     * @param isFabOpen current open state; the stored state becomes its negation
     */
    private void animateFAB(final boolean isFabOpen) {
        this.isFabOpen = !isFabOpen;
        if (fabPlus.isShown()) {
            if (isFabOpen) {
                fabPlus.startAnimation(rotate_backward);
                fabCamera.startAnimation(fab_close);
                fabGallery.startAnimation(fab_close);
                fabCustomGallery.startAnimation(fab_close);
                fabCamera.hide();
                fabGallery.hide();
                fabCustomGallery.hide();
            } else {
                fabPlus.startAnimation(rotate_forward);
                fabCamera.startAnimation(fab_open);
                fabGallery.startAnimation(fab_open);
                fabCustomGallery.startAnimation(fab_open);
                fabCamera.show();
                fabGallery.show();
                fabCustomGallery.show();
            }
        }
    }

    /**
     * Shows welcome message if user has no contributions yet i.e. new user.
     */
    @Override
    public void showWelcomeTip(final boolean shouldShow) {
        noContributionsYet.setVisibility(shouldShow ? VISIBLE : GONE);
    }

    /**
     * Responsible to set progress bar invisible and visible
     *
     * @param shouldShow True when contributions list should be hidden.
     */
    @Override
    public void showProgress(final boolean shouldShow) {
        progressBar.setVisibility(shouldShow ? VISIBLE : GONE);
    }

    @Override
    public void showNoContributionsUI(final boolean shouldShow) {
        noContributionsYet.setVisibility(shouldShow ? VISIBLE : GONE);
    }

    @Override
    public void onSaveInstanceState(@NonNull Bundle outState) {
        super.onSaveInstanceState(outState);
        final GridLayoutManager layoutManager = (GridLayoutManager) rvContributionsList
            .getLayoutManager();
        // Layout manager may not be set yet if the view was never created.
        if (layoutManager != null) {
            outState.putParcelable(RV_STATE, layoutManager.onSaveInstanceState());
        }
    }

    @Override
    public void onViewStateRestored(@Nullable Bundle savedInstanceState) {
        super.onViewStateRestored(savedInstanceState);
        if (null != savedInstanceState) {
            final Parcelable savedRecyclerLayoutState = savedInstanceState.getParcelable(RV_STATE);
            rvContributionsList.getLayoutManager().onRestoreInstanceState(savedRecyclerLayoutState);
        }
    }

    @Override
    public void retryUpload(final Contribution contribution) {
        if (null != callback) {//Just being safe, ideally they won't be called when detached
            callback.retryUpload(contribution);
        }
    }

    @Override
    public void deleteUpload(final Contribution contribution) {
        contributionsListPresenter.deleteUpload(contribution);
    }

    @Override
    public void openMediaDetail(final int position, boolean isWikipediaButtonDisplayed) {
        if (null != callback) {//Just being safe, ideally they won't be called when detached
            callback.showDetail(position, isWikipediaButtonDisplayed);
        }
    }

    /**
     * Handle callback for wikipedia icon clicked
     *
     * @param contribution
     */
    @Override
    public void addImageToWikipedia(Contribution contribution) {
        DialogUtil.showAlertDialog(getActivity(),
            getString(R.string.add_picture_to_wikipedia_article_title),
            String.format(getString(R.string.add_picture_to_wikipedia_article_desc),
                Locale.getDefault().getDisplayLanguage()),
            () -> {
                showAddImageToWikipediaInstructions(contribution);
            }, () -> {
                // do nothing
            });
    }

    /**
     * Pauses the current upload
     *
     * Fix: guards the nullable {@link #callback} like every other callback use,
     * instead of dereferencing it unconditionally.
     *
     * @param contribution
     */
    @Override
    public void pauseUpload(Contribution contribution) {
        ViewUtil.showShortToast(getContext(), R.string.pausing_upload);
        if (null != callback) {//Just being safe, ideally they won't be called when detached
            callback.pauseUpload(contribution);
        }
    }

    /**
     * Resumes the current upload
     *
     * Fix: guards the nullable {@link #callback} like every other callback use,
     * instead of dereferencing it unconditionally.
     *
     * @param contribution
     */
    @Override
    public void resumeUpload(Contribution contribution) {
        ViewUtil.showShortToast(getContext(), R.string.resuming_upload);
        if (null != callback) {//Just being safe, ideally they won't be called when detached
            callback.retryUpload(contribution);
        }
    }

    /**
     * Display confirmation dialog with instructions when the user tries to add image to wikipedia
     *
     * @param contribution
     */
    private void showAddImageToWikipediaInstructions(Contribution contribution) {
        FragmentManager fragmentManager = getFragmentManager();
        WikipediaInstructionsDialogFragment fragment = WikipediaInstructionsDialogFragment
            .newInstance(contribution);
        fragment.setCallback(this::onConfirmClicked);
        fragment.show(fragmentManager, "WikimediaFragment");
    }

    /** Returns the media at the given adapter position, or null if unavailable. */
    public Media getMediaAtPosition(final int i) {
        if (adapter.getContributionForPosition(i) != null) {
            return adapter.getContributionForPosition(i).getMedia();
        }
        return null;
    }

    /** Returns the number of contributions currently displayed. */
    public int getTotalMediaCount() {
        return contributionsSize;
    }

    /**
     * Open the editor for the language Wikipedia
     *
     * Fix: {@code contribution} is declared {@code @Nullable} but was
     * dereferenced unconditionally; a null contribution is now a no-op.
     *
     * @param contribution
     */
    @Override
    public void onConfirmClicked(@Nullable Contribution contribution, boolean copyWikicode) {
        if (contribution == null) {
            return;
        }
        if (copyWikicode) {
            String wikicode = contribution.getMedia().getWikiCode();
            Utils.copy("wikicode", wikicode, getContext());
        }
        final String url =
            languageWikipediaSite.mobileUrl() + "/wiki/" + contribution.getWikidataPlace()
                .getWikipediaPageTitle();
        Utils.handleWebUrl(getContext(), Uri.parse(url));
    }

    /**
     * Returns the state of the contribution at the given position, or null when
     * no contribution exists there (previously this threw a NullPointerException).
     */
    public Integer getContributionStateAt(int position) {
        final Contribution contribution = adapter.getContributionForPosition(position);
        return contribution == null ? null : contribution.getState();
    }

    public interface Callback {

        void notifyDataSetChanged();

        void retryUpload(Contribution contribution);

        void showDetail(int position, boolean isWikipediaButtonDisplayed);

        void pauseUpload(Contribution contribution);

        // Notify the viewpager that number of items have changed.
        void viewPagerNotifyDataSetChanged();
    }
}
| |
/*
* Copyright 2013 Square Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.wire;
import java.io.File;
import java.io.IOException;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import okio.Okio;
import okio.Source;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class WireCompilerTest {

    private StringWireLogger logger;
    // Scratch directory that each test writes generated sources into.
    private File testDir;

    @Before public void setUp() {
        System.out.println("cwd = " + new File(".").getAbsolutePath());
        testDir = makeTestDirectory("WireCompilerTest");
    }

    /** Creates (or reuses) an empty directory at {@code path} for test output. */
    private File makeTestDirectory(String path) {
        File dir = new File(path);
        dir.mkdir();
        cleanup(dir);
        List<String> filesBefore = getAllFiles(dir);
        assertThat(filesBefore).isEmpty();
        return dir;
    }

    @After public void tearDown() {
        cleanupAndDelete(testDir);
    }

    private void cleanupAndDelete(File dir) {
        cleanup(dir);
        if (!dir.delete()) {
            System.err.println("Couldn't delete " + dir.getAbsolutePath());
        }
    }

    /**
     * Compiles {@code sources} with default options plus the standard enum
     * options, then checks the generated files exactly match {@code outputs}.
     */
    private void testProto(String[] sources, String[] outputs) throws Exception {
        List<String> args = new ArrayList<>();
        args.add("--proto_path=../wire-runtime/src/test/proto");
        args.add("--java_out=" + testDir.getAbsolutePath());
        args.add("--enum_options=squareup.protos.custom_options.enum_value_option,"
            + "squareup.protos.custom_options.complex_enum_value_option,"
            + "squareup.protos.foreign.foreign_enum_value_option");
        args.addAll(Arrays.asList(sources));
        invokeCompiler(args.toArray(new String[0]));

        List<String> filesAfter = getAllFiles(testDir);
        assertThat(filesAfter.size())
            .overridingErrorMessage(filesAfter.toString())
            .isEqualTo(outputs.length);
        for (String output : outputs) {
            assertFilesMatch(testDir, output);
        }
    }

    /** Same as {@link #testProto} but with the {@code --android} flag. */
    private void testProtoAndroid(String[] sources, String[] outputs) throws Exception {
        List<String> args = new ArrayList<>();
        args.add("--proto_path=../wire-runtime/src/test/proto");
        args.add("--java_out=" + testDir.getAbsolutePath());
        args.add("--android");
        args.add("--enum_options=squareup.protos.custom_options.enum_value_option,"
            + "squareup.protos.custom_options.complex_enum_value_option,"
            + "squareup.protos.foreign.foreign_enum_value_option");
        args.addAll(Arrays.asList(sources));
        invokeCompiler(args.toArray(new String[0]));

        List<String> filesAfter = getAllFiles(testDir);
        assertThat(filesAfter.size())
            .overridingErrorMessage(filesAfter.toString())
            .isEqualTo(outputs.length);
        for (String output : outputs) {
            assertFilesMatchAndroid(testDir, output);
        }
    }

    /** Compiles {@code sources} with {@code --no_options} (one enum option kept). */
    private void testProtoNoOptions(String[] sources, String[] outputs) throws Exception {
        int numFlags = 4;
        String[] args = new String[numFlags + sources.length];
        args[0] = "--proto_path=../wire-runtime/src/test/proto";
        args[1] = "--no_options";
        // Emit one of the enum options anyway.
        args[2] = "--enum_options=squareup.protos.custom_options.enum_value_option";
        args[3] = "--java_out=" + testDir.getAbsolutePath();
        System.arraycopy(sources, 0, args, numFlags, sources.length);
        invokeCompiler(args);

        List<String> filesAfter = getAllFiles(testDir);
        assertThat(filesAfter).hasSize(outputs.length);
        for (String output : outputs) {
            assertFilesMatchNoOptions(testDir, output);
        }
    }

    /** Compiles {@code sources} with a generated {@code --registry_class}. */
    private void testProtoWithRegistry(String[] sources, String registryClass, String[] outputs)
        throws Exception {
        int numFlags = 3;
        String[] args = new String[numFlags + sources.length];
        args[0] = "--proto_path=../wire-runtime/src/test/proto";
        args[1] = "--java_out=" + testDir.getAbsolutePath();
        args[2] = "--registry_class=" + registryClass;
        System.arraycopy(sources, 0, args, numFlags, sources.length);
        invokeCompiler(args);

        List<String> filesAfter = getAllFiles(testDir);
        assertThat(filesAfter).hasSize(outputs.length);
        for (String output : outputs) {
            assertFilesMatch(testDir, output);
        }
    }

    private void testProtoWithRoots(String[] sources, String roots, String[] outputs)
        throws Exception {
        String[] extraArgs = {};
        this.testProtoWithRoots(sources, roots, outputs, extraArgs);
    }

    /** Compiles {@code sources} restricted to {@code --roots}, with optional extra flags. */
    private void testProtoWithRoots(
        String[] sources, String roots, String[] outputs, String[] extraArgs) throws Exception {
        int numFlags = 3;
        String[] args = new String[numFlags + sources.length + extraArgs.length];
        args[0] = "--proto_path=../wire-runtime/src/test/proto";
        args[1] = "--java_out=" + testDir.getAbsolutePath();
        args[2] = "--roots=" + roots;
        // Bulk-copy instead of the original element-by-element loop.
        System.arraycopy(extraArgs, 0, args, numFlags, extraArgs.length);
        System.arraycopy(sources, 0, args, numFlags + extraArgs.length, sources.length);
        invokeCompiler(args);

        List<String> filesAfter = getAllFiles(testDir);
        assertThat(filesAfter.size())
            .overridingErrorMessage("Wrong number of files written")
            .isEqualTo(outputs.length);
        for (String output : outputs) {
            assertFilesMatch(testDir, output);
        }
    }

    @Test public void testFooBar() throws Exception {
        String[] sources = {
            "foo.proto",
            "bar.proto"
        };
        String[] outputs = {
            "com/squareup/foobar/protos/bar/Bar.java",
            "com/squareup/foobar/protos/foo/Foo.java"
        };
        testProto(sources, outputs);
    }

    @Test public void testDifferentPackageFooBar() throws Exception {
        String[] sources = {
            "differentpackage/foo.proto",
            "differentpackage/bar.proto"
        };
        String[] outputs = {
            "com/squareup/differentpackage/protos/bar/Bar.java",
            "com/squareup/differentpackage/protos/foo/Foo.java"
        };
        testProto(sources, outputs);
    }

    @Test public void testPerson() throws Exception {
        String[] sources = {
            "person.proto"
        };
        String[] outputs = {
            "com/squareup/wire/protos/person/Person.java"
        };
        testProto(sources, outputs);
    }

    @Test public void testPersonAndroid() throws Exception {
        String[] sources = {
            "person.proto"
        };
        String[] outputs = {
            "com/squareup/wire/protos/person/Person.java"
        };
        testProtoAndroid(sources, outputs);
    }

    @Test public void testSimple() throws Exception {
        String[] sources = {
            "simple_message.proto",
            "external_message.proto",
            "foreign.proto"
        };
        String[] outputs = {
            "com/squareup/wire/protos/simple/Ext_simple_message.java",
            "com/squareup/wire/protos/simple/SimpleMessage.java",
            "com/squareup/wire/protos/simple/ExternalMessage.java",
            "com/squareup/wire/protos/foreign/Ext_foreign.java",
            "com/squareup/wire/protos/foreign/ForeignEnum.java",
            "com/squareup/wire/protos/foreign/ForeignMessage.java"
        };
        testProto(sources, outputs);
    }

    @Test public void testOneOf() throws Exception {
        String[] sources = {
            "one_of.proto"
        };
        String[] outputs = {
            "com/squareup/wire/protos/oneof/OneOfMessage.java"
        };
        testProto(sources, outputs);
    }

    @Test public void testRegistry() throws Exception {
        String[] sources = {
            "simple_message.proto",
            "external_message.proto",
            "foreign.proto"
        };
        String registry = "com.squareup.wire.protos.ProtoRegistry";
        String[] outputs = {
            "com/squareup/wire/protos/ProtoRegistry.java",
            "com/squareup/wire/protos/simple/Ext_simple_message.java",
            "com/squareup/wire/protos/simple/SimpleMessage.java",
            "com/squareup/wire/protos/simple/ExternalMessage.java",
            "com/squareup/wire/protos/foreign/Ext_foreign.java",
            "com/squareup/wire/protos/foreign/ForeignEnum.java",
            "com/squareup/wire/protos/foreign/ForeignMessage.java"
        };
        testProtoWithRegistry(sources, registry, outputs);
    }

    @Test public void testEmptyRegistry() throws Exception {
        String[] sources = {
            "person.proto"
        };
        String registry = "com.squareup.wire.protos.person.EmptyRegistry";
        String[] outputs = {
            "com/squareup/wire/protos/person/EmptyRegistry.java",
            "com/squareup/wire/protos/person/Person.java"
        };
        testProtoWithRegistry(sources, registry, outputs);
    }

    @Test public void testOneClassRegistry() throws Exception {
        String[] sources = {
            "one_extension.proto"
        };
        String registry = "com.squareup.wire.protos.one_extension.OneExtensionRegistry";
        String[] outputs = {
            "com/squareup/wire/protos/one_extension/Ext_one_extension.java",
            "com/squareup/wire/protos/one_extension/Foo.java",
            "com/squareup/wire/protos/one_extension/OneExtension.java",
            "com/squareup/wire/protos/one_extension/OneExtensionRegistry.java"
        };
        testProtoWithRegistry(sources, registry, outputs);
    }

    @Test public void testSingleLevel() throws Exception {
        String[] sources = {
            "single_level.proto"
        };
        String[] outputs = {
            "com/squareup/wire/protos/single_level/Foo.java",
            "com/squareup/wire/protos/single_level/Foos.java",
        };
        testProto(sources, outputs);
    }

    @Test public void testSameBasename() throws Exception {
        String[] sources = {
            "single_level.proto",
            "samebasename/single_level.proto" };
        String[] outputs = {
            "com/squareup/wire/protos/single_level/Foo.java",
            "com/squareup/wire/protos/single_level/Foos.java",
            "com/squareup/wire/protos/single_level/Bar.java",
            "com/squareup/wire/protos/single_level/Bars.java",
        };
        testProto(sources, outputs);
    }

    @Test public void testChildPackage() throws Exception {
        String[] sources = {
            "child_pkg.proto"
        };
        String[] outputs = {
            "com/squareup/wire/protos/ChildPackage.java",
        };
        testProto(sources, outputs);
    }

    @Test public void testAllTypes() throws Exception {
        String[] sources = {
            "all_types.proto"
        };
        String[] outputs = {
            "com/squareup/wire/protos/alltypes/Ext_all_types.java",
            "com/squareup/wire/protos/alltypes/AllTypes.java"
        };
        testProto(sources, outputs);
    }

    @Test public void testEdgeCases() throws Exception {
        String[] sources = {
            "edge_cases.proto"
        };
        String[] outputs = {
            "com/squareup/wire/protos/edgecases/NoFields.java",
            "com/squareup/wire/protos/edgecases/OneField.java",
            "com/squareup/wire/protos/edgecases/OneBytesField.java",
            "com/squareup/wire/protos/edgecases/Recursive.java"
        };
        testProto(sources, outputs);
    }

    @Test public void testUnknownFields() throws Exception {
        String[] sources = {
            "unknown_fields.proto"
        };
        String[] outputs = {
            "com/squareup/wire/protos/unknownfields/VersionOne.java",
            "com/squareup/wire/protos/unknownfields/VersionTwo.java"
        };
        testProto(sources, outputs);
    }

    @Test public void testCustomOptions() throws Exception {
        String[] sources = {
            "custom_options.proto"
        };
        String[] outputs = {
            "com/squareup/wire/protos/custom_options/FooBar.java",
            "com/squareup/wire/protos/custom_options/Ext_custom_options.java",
            "com/squareup/wire/protos/custom_options/MessageWithOptions.java"
        };
        testProto(sources, outputs);
    }

    @Test public void testCustomOptionsNoOptions() throws Exception {
        String[] sources = {
            "custom_options.proto"
        };
        String[] outputs = {
            "com/squareup/wire/protos/custom_options/FooBar.java",
            "com/squareup/wire/protos/custom_options/Ext_custom_options.java",
            "com/squareup/wire/protos/custom_options/MessageWithOptions.java"
        };
        testProtoNoOptions(sources, outputs);
    }

    @Test public void testRedacted() throws Exception {
        String[] sources = {
            "redacted_test.proto"
        };
        String[] outputs = {
            "com/squareup/wire/protos/redacted/Ext_redacted_test.java",
            "com/squareup/wire/protos/redacted/NotRedacted.java",
            "com/squareup/wire/protos/redacted/Redacted.java",
            "com/squareup/wire/protos/redacted/RedactedChild.java",
            "com/squareup/wire/protos/redacted/RedactedCycleA.java",
            "com/squareup/wire/protos/redacted/RedactedCycleB.java",
            "com/squareup/wire/protos/redacted/RedactedExtension.java",
            "com/squareup/wire/protos/redacted/RedactedRepeated.java",
            "com/squareup/wire/protos/redacted/RedactedRequired.java",
        };
        testProto(sources, outputs);
    }

    @Test public void testNoRoots() throws Exception {
        String[] sources = {
            "roots.proto"
        };
        String[] outputs = {
            "com/squareup/wire/protos/roots/A.java",
            "com/squareup/wire/protos/roots/B.java",
            "com/squareup/wire/protos/roots/C.java",
            "com/squareup/wire/protos/roots/D.java",
            "com/squareup/wire/protos/roots/E.java",
            "com/squareup/wire/protos/roots/G.java",
            "com/squareup/wire/protos/roots/H.java",
            "com/squareup/wire/protos/roots/I.java",
            "com/squareup/wire/protos/roots/J.java",
            "com/squareup/wire/protos/roots/K.java",
            "com/squareup/wire/protos/roots/Ext_roots.java"
        };
        testProto(sources, outputs);
    }

    @Test public void testRootsA() throws Exception {
        String[] sources = {
            "roots.proto"
        };
        String[] outputs = {
            "com/squareup/wire/protos/roots/A.java",
            "com/squareup/wire/protos/roots/B.java",
            "com/squareup/wire/protos/roots/C.java",
            "com/squareup/wire/protos/roots/D.java",
            "com/squareup/wire/protos/roots/I.java",
            "com/squareup/wire/protos/roots/J.java",
            "com/squareup/wire/protos/roots/K.java",
            "com/squareup/wire/protos/roots/Ext_roots.java"
        };
        String roots = "squareup.protos.roots.A";
        testProtoWithRoots(sources, roots, outputs);
    }

    @Test public void testRootsB() throws Exception {
        String[] sources = {
            "roots.proto"
        };
        String[] outputs = {
            "com/squareup/wire/protos/roots/B.java",
            "com/squareup/wire/protos/roots/C.java",
            "com/squareup/wire/protos/roots/I.java",
            "com/squareup/wire/protos/roots/J.java",
            "com/squareup/wire/protos/roots/K.java",
            "com/squareup/wire/protos/roots/Ext_roots.java"
        };
        String roots = "squareup.protos.roots.B";
        testProtoWithRoots(sources, roots, outputs);
    }

    @Test public void testRootsE() throws Exception {
        String[] sources = {
            "roots.proto"
        };
        String[] outputs = {
            "com/squareup/wire/protos/roots/E.java",
            "com/squareup/wire/protos/roots/G.java",
            "com/squareup/wire/protos/roots/I.java",
            "com/squareup/wire/protos/roots/J.java",
            "com/squareup/wire/protos/roots/K.java",
            "com/squareup/wire/protos/roots/Ext_roots.java"
        };
        String roots = "squareup.protos.roots.E";
        testProtoWithRoots(sources, roots, outputs);
    }

    @Test public void testRootsH() throws Exception {
        String[] sources = {
            "roots.proto"
        };
        String[] outputs = {
            "com/squareup/wire/protos/roots/E.java",
            "com/squareup/wire/protos/roots/G.java",
            "com/squareup/wire/protos/roots/H.java",
            "com/squareup/wire/protos/roots/I.java",
            "com/squareup/wire/protos/roots/J.java",
            "com/squareup/wire/protos/roots/K.java",
            "com/squareup/wire/protos/roots/Ext_roots.java"
        };
        String roots = "squareup.protos.roots.H";
        testProtoWithRoots(sources, roots, outputs);
    }

    @Test public void testRootsI() throws Exception {
        String[] sources = {
            "roots.proto"
        };
        String[] outputs = {
            "com/squareup/wire/protos/roots/I.java",
            "com/squareup/wire/protos/roots/J.java",
            "com/squareup/wire/protos/roots/K.java",
            "com/squareup/wire/protos/roots/Ext_roots.java"
        };
        String roots = "squareup.protos.roots.I";
        testProtoWithRoots(sources, roots, outputs);
    }

    @Test public void testDryRun() throws Exception {
        String[] sources = {
            "service_root.proto"
        };
        String[] outputs = { };
        String roots = "squareup.wire.protos.roots.TheService";
        // When running with the --dry_run flag and --quiet, only the names of the output
        // files should be printed to the log.
        String[] extraArgs = {
            "--dry_run",
            "--quiet"
        };
        testProtoWithRoots(sources, roots, outputs, extraArgs);
        assertThat(logger.getLog()).isEqualTo(""
            + testDir.getAbsolutePath() + " com.squareup.wire.protos.roots.TheRequest\n"
            + testDir.getAbsolutePath() + " com.squareup.wire.protos.roots.TheResponse\n");
    }

    /** Recursively deletes the contents of {@code dir}, keeping {@code dir} itself. */
    private void cleanup(File dir) {
        assertThat(dir).isNotNull();
        assertThat(dir.isDirectory()).isTrue();
        File[] files = dir.listFiles();
        if (files != null) {
            for (File f : files) {
                cleanupHelper(f);
            }
        }
    }

    /**
     * Deletes {@code f}, recursing into it first when it is a directory.
     * (The original had an if/else whose branches both ended in {@code f.delete()}.)
     */
    private void cleanupHelper(File f) {
        assertThat(f).isNotNull();
        if (f.isDirectory()) {
            File[] files = f.listFiles();
            if (files != null) {
                for (File ff : files) {
                    cleanupHelper(ff);
                }
            }
        }
        f.delete();
    }

    /** Returns the absolute paths of all regular files under {@code root}. */
    private List<String> getAllFiles(File root) {
        List<String> files = new ArrayList<>();
        getAllFilesHelper(root, files);
        return files;
    }

    private void getAllFilesHelper(File root, List<String> files) {
        if (root.isFile()) {
            files.add(root.getAbsolutePath());
        }
        File[] allFiles = root.listFiles();
        if (allFiles != null) {
            for (File f : allFiles) {
                getAllFilesHelper(f, files);
            }
        }
    }

    /** Runs the Wire compiler with {@code args}, capturing its log in {@link #logger}. */
    private void invokeCompiler(String[] args) throws Exception {
        CommandLineOptions options = new CommandLineOptions(args);
        logger = new StringWireLogger(options.quiet);
        FileSystem fs = FileSystems.getDefault();
        new WireCompiler(options, fs, logger).compile();
    }

    private void assertFilesMatch(File outputDir, String path) throws IOException {
        File expectedFile = new File("../wire-runtime/src/test/proto-java/" + path);
        File actualFile = new File(outputDir, path);
        assertFilesMatch(expectedFile, actualFile);
    }

    private void assertFilesMatchNoOptions(File outputDir, String path) throws IOException {
        // Compare against file with .noOptions suffix if present
        File expectedFile = new File("../wire-runtime/src/test/proto-java/" + path + ".noOptions");
        if (expectedFile.exists()) {
            System.out.println("Comparing against expected output " + expectedFile.getName());
        } else {
            expectedFile = new File("../wire-runtime/src/test/proto-java/" + path);
        }
        File actualFile = new File(outputDir, path);
        assertFilesMatch(expectedFile, actualFile);
    }

    private void assertFilesMatchAndroid(File outputDir, String path) throws IOException {
        // Compare against file with .android suffix if present
        File expectedFile = new File("../wire-runtime/src/test/proto-java/" + path + ".android");
        if (expectedFile.exists()) {
            System.out.println("Comparing against expected output " + expectedFile.getName());
        } else {
            expectedFile = new File("../wire-runtime/src/test/proto-java/" + path);
        }
        File actualFile = new File(outputDir, path);
        assertFilesMatch(expectedFile, actualFile);
    }

    /** Asserts the two files have identical content after CRLF -> LF normalization. */
    private void assertFilesMatch(File expectedFile, File actualFile) throws IOException {
        String expected;
        try (Source source = Okio.source(expectedFile)) {
            expected = Okio.buffer(source).readUtf8();
        }
        String actual;
        try (Source source = Okio.source(actualFile)) {
            actual = Okio.buffer(source).readUtf8();
        }
        // Normalize CRLF -> LF
        expected = expected.replace("\r\n", "\n");
        actual = actual.replace("\r\n", "\n");
        assertThat(actual).isEqualTo(expected);
    }
}
| |
/*
* This file provided by Facebook is for non-commercial testing and evaluation
* purposes only. Facebook reserves all rights not expressly granted.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* FACEBOOK BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package com.nutrition.express.imageviewer.zoomable;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.RectF;
import android.graphics.drawable.Animatable;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.ViewConfiguration;
import androidx.annotation.Nullable;
import com.facebook.common.internal.Preconditions;
import com.facebook.common.logging.FLog;
import com.facebook.drawee.controller.AbstractDraweeController;
import com.facebook.drawee.controller.BaseControllerListener;
import com.facebook.drawee.controller.ControllerListener;
import com.facebook.drawee.generic.GenericDraweeHierarchy;
import com.facebook.drawee.interfaces.DraweeController;
import com.facebook.drawee.view.DraweeView;
/**
* DraweeView that has zoomable capabilities.
* <p>
* Once the image loads, pinch-to-zoom and translation gestures are enabled.
*
*/
public class ZoomableDraweeView extends DraweeView<GenericDraweeHierarchy>
        implements ZoomableController.Listener {

    private static final Class<?> TAG = ZoomableDraweeView.class;

    // Scale factor above which the (optional) huge-image controller is swapped in.
    private static final float HUGE_IMAGE_SCALE_FACTOR_THRESHOLD = 1.1f;

    // Reused rect buffers to avoid allocating on every layout pass.
    private final RectF mImageBounds = new RectF();
    private final RectF mViewBounds = new RectF();

    // Enables zooming once the final image is set and disables it on release.
    private final ControllerListener mControllerListener = new BaseControllerListener<Object>() {
        @Override
        public void onFinalImageSet(
                String id,
                @Nullable Object imageInfo,
                @Nullable Animatable animatable) {
            ZoomableDraweeView.this.onFinalImageSet();
        }

        @Override
        public void onRelease(String id) {
            ZoomableDraweeView.this.onRelease();
        }
    };

    // Optional controller for a higher-resolution image, attached lazily once
    // the user zooms past HUGE_IMAGE_SCALE_FACTOR_THRESHOLD.
    private DraweeController mHugeImageController;
    private ZoomableController mZoomableController = DefaultZoomableController.newInstance();

    public ZoomableDraweeView(Context context) {
        super(context);
        init();
    }

    public ZoomableDraweeView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    public ZoomableDraweeView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init();
    }

    private void init() {
        // Listen for transform changes so we can invalidate and maybe swap
        // in the huge-image controller.
        mZoomableController.setListener(this);
    }

    /**
     * Replaces the zoomable controller, detaching this view as listener from
     * the old one and attaching it to the new one.
     */
    public void setZoomableController(ZoomableController zoomableController) {
        Preconditions.checkNotNull(zoomableController);
        mZoomableController.setListener(null);
        mZoomableController = zoomableController;
        mZoomableController.setListener(this);
    }

    @Override
    public void setController(@Nullable DraweeController controller) {
        // Single-controller path: no huge-image fallback.
        setControllers(controller, null);
    }

    // Swaps the active controller, moving our listener from the old to the
    // new one, and remembers the huge-image controller for later.
    private void setControllersInternal(
            @Nullable DraweeController controller,
            @Nullable DraweeController hugeImageController) {
        removeControllerListener(getController());
        addControllerListener(controller);
        mHugeImageController = hugeImageController;
        super.setController(controller);
    }

    /**
     * Sets the controllers for the normal and huge image.
     *
     * <p> IMPORTANT: in order to avoid a flicker when switching to the huge image, the huge image
     * controller should have the normal-image-uri set as its low-res-uri.
     *
     * @param controller controller to be initially used
     * @param hugeImageController controller to be used after the client starts zooming-in
     */
    public void setControllers(
            @Nullable DraweeController controller,
            @Nullable DraweeController hugeImageController) {
        // Clear first and disable zooming so the old transform does not apply
        // to the newly set image; zooming is re-enabled in onFinalImageSet().
        setControllersInternal(null, null);
        mZoomableController.setEnabled(false);
        setControllersInternal(controller, hugeImageController);
    }

    // Swaps in the huge-image controller once the user zooms past the threshold.
    private void maybeSetHugeImageController() {
        if (mHugeImageController != null &&
                mZoomableController.getScaleFactor() > HUGE_IMAGE_SCALE_FACTOR_THRESHOLD) {
            setControllersInternal(mHugeImageController, null);
        }
    }

    private void removeControllerListener(DraweeController controller) {
        if (controller instanceof AbstractDraweeController) {
            ((AbstractDraweeController) controller)
                    .removeControllerListener(mControllerListener);
        }
    }

    private void addControllerListener(DraweeController controller) {
        if (controller instanceof AbstractDraweeController) {
            ((AbstractDraweeController) controller)
                    .addControllerListener(mControllerListener);
        }
    }

    @Override
    protected void onDraw(Canvas canvas) {
        // Draw the image through the controller's zoom/pan transform.
        int saveCount = canvas.save();
        canvas.concat(mZoomableController.getTransform());
        super.onDraw(canvas);
        canvas.restoreToCount(saveCount);
    }

    // State for hand-rolled click / long-click detection in onTouchEvent.
    private long startClickTime;
    private float initialMotionX, initialMotionY;
    private int touchSlop = ViewConfiguration.get(getContext()).getScaledTouchSlop();
    private Runnable longClick = new Runnable() {
        @Override
        public void run() {
            performLongClick();
        }
    };

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        float x = event.getX();
        float y = event.getY();
        //I add click detection
        switch (event.getActionMasked()) {
            case MotionEvent.ACTION_DOWN:
                startClickTime = System.currentTimeMillis();
                // Schedule a long-click; cancelled if the pointer moves or lifts.
                postDelayed(longClick, ViewConfiguration.getLongPressTimeout());
                initialMotionX = x;
                initialMotionY = y;
                break;
            case MotionEvent.ACTION_UP:
                long clickDuration = System.currentTimeMillis() - startClickTime;
                // Treat a press shorter than 200ms as a click.
                if (clickDuration < 200) {
                    performClick();
                }
                getParent().requestDisallowInterceptTouchEvent(false);
                removeCallbacks(longClick);
                break;
            case MotionEvent.ACTION_CANCEL:
                removeCallbacks(longClick);
                // NOTE(review): no break here — CANCEL falls through into the
                // MOVE handling below. Looks intentional (the extra slop check
                // is harmless), but confirm a break was not intended.
            case MotionEvent.ACTION_MOVE:
                float xDiff = Math.abs(x - initialMotionX);
                float yDiff = Math.abs(y - initialMotionY);
                // Pointer moved beyond the touch slop: this is a drag, not a
                // long-press.
                if (xDiff > touchSlop || yDiff > touchSlop) {
                    removeCallbacks(longClick);
                }
                break;
            case MotionEvent.ACTION_POINTER_DOWN:
                // Second pointer: a pinch gesture — keep the event stream and
                // cancel any pending long-press.
                getParent().requestDisallowInterceptTouchEvent(true);
                removeCallbacks(longClick);
                break;
        }
        // Give the zoomable controller first crack at the gesture.
        if (mZoomableController.onTouchEvent(event)) {
            if (mZoomableController.getScaleFactor() > 1.0f) {
                // While zoomed in, don't let parents (e.g. a ViewPager) steal
                // the gesture.
                getParent().requestDisallowInterceptTouchEvent(true);
            }
            //FLog.v(TAG, "onTouchEvent: view %x, handled by zoomable controller", this.hashCode());
            return true;
        }
        //FLog.v(TAG, "onTouchEvent: view %x, handled by the super", this.hashCode());
        return super.onTouchEvent(event);
    }

    @Override
    protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
        FLog.v(TAG, "onLayout: view %x", this.hashCode());
        super.onLayout(changed, left, top, right, bottom);
        updateZoomableControllerBounds();
    }

    // Called once the final image is loaded: enable pinch/pan gestures.
    private void onFinalImageSet() {
        FLog.v(TAG, "onFinalImageSet: view %x", this.hashCode());
        if (!mZoomableController.isEnabled()) {
            updateZoomableControllerBounds();
            mZoomableController.setEnabled(true);
        }
    }

    // Called when the image is released: disable gestures until a new image loads.
    private void onRelease() {
        FLog.v(TAG, "onRelease: view %x", this.hashCode());
        mZoomableController.setEnabled(false);
    }

    @Override
    public void onTransformChanged(Matrix transform) {
        FLog.v(TAG, "onTransformChanged: view %x", this.hashCode());
        maybeSetHugeImageController();
        invalidate();
    }

    // Pushes the current view and image bounds into the zoomable controller.
    private void updateZoomableControllerBounds() {
        getHierarchy().getActualImageBounds(mImageBounds);
        mViewBounds.set(0, 0, getWidth(), getHeight());
        mZoomableController.setImageBounds(mImageBounds);
        mZoomableController.setViewBounds(mViewBounds);
        FLog.v(
                TAG,
                "updateZoomableControllerBounds: view %x, view bounds: %s, image bounds: %s",
                this.hashCode(),
                mViewBounds,
                mImageBounds);
    }

    /**
     * Resets the zoom/pan transform to identity.
     * <p>
     * NOTE(review): this mutates the controller's matrix directly and does not
     * call invalidate(); the view may not redraw until the next transform
     * change — confirm this is the intended behavior.
     */
    public void reset() {
        mZoomableController.getTransform().reset();
    }
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.media.session;
import android.annotation.IntDef;
import android.annotation.NonNull;
import android.annotation.Nullable;
import android.app.Activity;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ParceledListSlice;
import android.media.AudioAttributes;
import android.media.MediaDescription;
import android.media.MediaMetadata;
import android.media.Rating;
import android.media.VolumeProvider;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.os.Parcel;
import android.os.Parcelable;
import android.os.RemoteException;
import android.os.ResultReceiver;
import android.os.UserHandle;
import android.service.media.MediaBrowserService;
import android.text.TextUtils;
import android.util.Log;
import android.view.KeyEvent;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.ref.WeakReference;
import java.util.List;
/**
* Allows interaction with media controllers, volume keys, media buttons, and
* transport controls.
* <p>
* A MediaSession should be created when an app wants to publish media playback
* information or handle media keys. In general an app only needs one session
* for all playback, though multiple sessions can be created to provide finer
* grain controls of media.
* <p>
* Once a session is created the owner of the session may pass its
* {@link #getSessionToken() session token} to other processes to allow them to
* create a {@link MediaController} to interact with the session.
* <p>
* To receive commands, media keys, and other events a {@link Callback} must be
* set with {@link #setCallback(Callback)} and {@link #setActive(boolean)
* setActive(true)} must be called.
* <p>
* When an app is finished performing playback it must call {@link #release()}
* to clean up the session and notify any controllers.
* <p>
* MediaSession objects are thread safe.
*/
public final class MediaSession {
private static final String TAG = "MediaSession";

/**
 * Set this flag on the session to indicate that it can handle media button
 * events.
 */
public static final int FLAG_HANDLES_MEDIA_BUTTONS = 1 << 0;

/**
 * Set this flag on the session to indicate that it handles transport
 * control commands through its {@link Callback}.
 */
public static final int FLAG_HANDLES_TRANSPORT_CONTROLS = 1 << 1;

/**
 * System only flag for a session that needs to have priority over all other
 * sessions. This flag ensures this session will receive media button events
 * regardless of the current ordering in the system.
 *
 * @hide
 */
public static final int FLAG_EXCLUSIVE_GLOBAL_PRIORITY = 1 << 16;

/** @hide */
@Retention(RetentionPolicy.SOURCE)
@IntDef(flag = true, value = {
        FLAG_HANDLES_MEDIA_BUTTONS,
        FLAG_HANDLES_TRANSPORT_CONTROLS,
        FLAG_EXCLUSIVE_GLOBAL_PRIORITY })
public @interface SessionFlags { }

// Guards mCallback and mVolumeProvider.
private final Object mLock = new Object();

// Max dimension for artwork bitmaps sent across the binder; see setMetadata().
private final int mMaxBitmapSize;

private final MediaSession.Token mSessionToken;
private final MediaController mController;
private final ISession mBinder;
private final CallbackStub mCbStub;

// Handler wrapping the app-provided Callback; null when no callback is set.
// Access is synchronized on mLock.
private CallbackMessageHandler mCallback;
// Non-null only while in remote-playback mode; see setPlaybackToRemote().
private VolumeProvider mVolumeProvider;
// Last state passed to setPlaybackState(); read by Callback.onMediaButtonEvent().
private PlaybackState mPlaybackState;

// Mirrors the active state last successfully sent to the system service.
private boolean mActive = false;
/**
* Creates a new session. The session will automatically be registered with
* the system but will not be published until {@link #setActive(boolean)
* setActive(true)} is called. You must call {@link #release()} when
* finished with the session.
*
* @param context The context to use to create the session.
* @param tag A short name for debugging purposes.
*/
public MediaSession(@NonNull Context context, @NonNull String tag) {
    // Delegate to the full constructor, creating the session as the calling user.
    this(context, tag, UserHandle.myUserId());
}
/**
* Creates a new session as the specified user. To create a session as a
* user other than your own you must hold the
* {@link android.Manifest.permission#INTERACT_ACROSS_USERS_FULL}
* permission.
*
* @param context The context to use to create the session.
* @param tag A short name for debugging purposes.
* @param userId The user id to create the session as.
* @hide
*/
public MediaSession(@NonNull Context context, @NonNull String tag, int userId) {
    if (context == null) {
        throw new IllegalArgumentException("context cannot be null.");
    }
    if (TextUtils.isEmpty(tag)) {
        throw new IllegalArgumentException("tag cannot be null or empty");
    }
    // System-configured cap for metadata artwork; applied in setMetadata().
    mMaxBitmapSize = context.getResources().getDimensionPixelSize(
            com.android.internal.R.dimen.config_mediaMetadataBitmapMaxSize);
    mCbStub = new CallbackStub(this);
    MediaSessionManager manager = (MediaSessionManager) context
            .getSystemService(Context.MEDIA_SESSION_SERVICE);
    try {
        // Register with the system service; the returned binder is the
        // session's handle for all subsequent updates.
        mBinder = manager.createSession(mCbStub, tag, userId);
        mSessionToken = new Token(mBinder.getController());
        mController = new MediaController(context, mSessionToken);
    } catch (RemoteException e) {
        // Session creation is unrecoverable; surface as unchecked.
        throw new RuntimeException("Remote error creating session.", e);
    }
}
/**
* Set the callback to receive updates for the MediaSession. This includes
* media button events and transport controls. The caller's thread will be
* used to post updates.
* <p>
* Set the callback to null to stop receiving updates.
*
* @param callback The callback object
*/
public void setCallback(@Nullable Callback callback) {
    // Null handler: updates are posted on the caller's thread.
    setCallback(callback, null);
}
/**
* Set the callback to receive updates for the MediaSession. This includes
* media button events and transport controls.
* <p>
* Set the callback to null to stop receiving updates.
*
* @param callback The callback to receive updates on.
* @param handler The handler that events should be posted on.
*/
/**
 * Set the callback to receive updates for the MediaSession. This includes
 * media button events and transport controls.
 * <p>
 * Set the callback to null to stop receiving updates.
 *
 * @param callback The callback to receive updates on.
 * @param handler The handler that events should be posted on; defaults to
 *            the calling thread's handler when null.
 */
public void setCallback(@Nullable Callback callback, @Nullable Handler handler) {
    synchronized (mLock) {
        // Detach any previously registered callback from this session.
        if (mCallback != null) {
            mCallback.mCallback.mSession = null;
        }
        if (callback == null) {
            mCallback = null;
            return;
        }
        Handler target = (handler != null) ? handler : new Handler();
        callback.mSession = this;
        mCallback = new CallbackMessageHandler(target.getLooper(), callback);
    }
}
/**
* Set an intent for launching UI for this Session. This can be used as a
* quick link to an ongoing media screen. The intent should be for an
* activity that may be started using {@link Activity#startActivity(Intent)}.
*
* @param pi The intent to launch to show UI for this Session.
*/
public void setSessionActivity(@Nullable PendingIntent pi) {
    try {
        // Forward to the system service; controllers query it for the UI intent.
        mBinder.setLaunchPendingIntent(pi);
    } catch (RemoteException e) {
        Log.wtf(TAG, "Failure in setLaunchPendingIntent.", e);
    }
}
/**
* Set a pending intent for your media button receiver to allow restarting
* playback after the session has been stopped. If your app is started in
* this way an {@link Intent#ACTION_MEDIA_BUTTON} intent will be sent via
* the pending intent.
*
* @param mbr The {@link PendingIntent} to send the media button event to.
*/
public void setMediaButtonReceiver(@Nullable PendingIntent mbr) {
    try {
        // The system sends ACTION_MEDIA_BUTTON through this intent after the
        // session has been stopped.
        mBinder.setMediaButtonReceiver(mbr);
    } catch (RemoteException e) {
        Log.wtf(TAG, "Failure in setMediaButtonReceiver.", e);
    }
}
/**
* Set any flags for the session.
*
* @param flags The flags to set for this session.
*/
public void setFlags(@SessionFlags int flags) {
    try {
        // Bitwise OR of FLAG_* constants; replaces any previously set flags.
        mBinder.setFlags(flags);
    } catch (RemoteException e) {
        Log.wtf(TAG, "Failure in setFlags.", e);
    }
}
/**
* Set the attributes for this session's audio. This will affect the
* system's volume handling for this session. If
* {@link #setPlaybackToRemote} was previously called it will stop receiving
* volume commands and the system will begin sending volume changes to the
* appropriate stream.
* <p>
* By default sessions use attributes for media.
*
* @param attributes The {@link AudioAttributes} for this session's audio.
*/
public void setPlaybackToLocal(AudioAttributes attributes) {
    if (attributes == null) {
        throw new IllegalArgumentException("Attributes cannot be null for local playback.");
    }
    try {
        // Switches the system back to stream-based volume handling,
        // ending any prior remote-volume mode.
        mBinder.setPlaybackToLocal(attributes);
    } catch (RemoteException e) {
        Log.wtf(TAG, "Failure in setPlaybackToLocal.", e);
    }
}
/**
* Configure this session to use remote volume handling. This must be called
* to receive volume button events, otherwise the system will adjust the
* appropriate stream volume for this session. If
* {@link #setPlaybackToLocal} was previously called the system will stop
* handling volume changes for this session and pass them to the volume
* provider instead.
*
* @param volumeProvider The provider that will handle volume changes. May
* not be null.
*/
public void setPlaybackToRemote(@NonNull VolumeProvider volumeProvider) {
    if (volumeProvider == null) {
        throw new IllegalArgumentException("volumeProvider may not be null!");
    }
    // Record the active provider first: notifyRemoteVolumeChanged() ignores
    // updates from any provider other than the current one.
    synchronized (mLock) {
        mVolumeProvider = volumeProvider;
    }
    // Push the provider's volume changes back to the system service.
    volumeProvider.setCallback(new VolumeProvider.Callback() {
        @Override
        public void onVolumeChanged(VolumeProvider volumeProvider) {
            notifyRemoteVolumeChanged(volumeProvider);
        }
    });
    try {
        mBinder.setPlaybackToRemote(volumeProvider.getVolumeControl(),
                volumeProvider.getMaxVolume());
        mBinder.setCurrentVolume(volumeProvider.getCurrentVolume());
    } catch (RemoteException e) {
        Log.wtf(TAG, "Failure in setPlaybackToRemote.", e);
    }
}
/**
* Set if this session is currently active and ready to receive commands. If
* set to false your session's controller may not be discoverable. You must
* set the session to active before it can start receiving media button
* events or transport commands.
*
* @param active Whether this session is active or not.
*/
/**
 * Set if this session is currently active and ready to receive commands. If
 * set to false your session's controller may not be discoverable. You must
 * set the session to active before it can start receiving media button
 * events or transport commands.
 *
 * @param active Whether this session is active or not.
 */
public void setActive(boolean active) {
    if (mActive != active) {
        try {
            mBinder.setActive(active);
            // Only record the new state once the service accepted it.
            mActive = active;
        } catch (RemoteException e) {
            Log.wtf(TAG, "Failure in setActive.", e);
        }
    }
}
/**
* Get the current active state of this session.
*
* @return True if the session is active, false otherwise.
*/
public boolean isActive() {
    // Reflects the last state successfully sent via setActive().
    return mActive;
}
/**
* Send a proprietary event to all MediaControllers listening to this
* Session. It's up to the Controller/Session owner to determine the meaning
* of any events.
*
* @param event The name of the event to send
* @param extras Any extras included with the event
*/
public void sendSessionEvent(@NonNull String event, @Nullable Bundle extras) {
    if (TextUtils.isEmpty(event)) {
        throw new IllegalArgumentException("event cannot be null or empty");
    }
    try {
        // Broadcast to all controllers attached to this session.
        mBinder.sendEvent(event, extras);
    } catch (RemoteException e) {
        Log.wtf(TAG, "Error sending event", e);
    }
}
/**
* This must be called when an app has finished performing playback. If
* playback is expected to start again shortly the session can be left open,
* but it must be released if your activity or service is being destroyed.
*/
public void release() {
    try {
        // Tears down the session on the service side and notifies controllers.
        mBinder.destroy();
    } catch (RemoteException e) {
        Log.wtf(TAG, "Error releasing session: ", e);
    }
}
/**
* Retrieve a token object that can be used by apps to create a
* {@link MediaController} for interacting with this session. The owner of
* the session is responsible for deciding how to distribute these tokens.
*
* @return A token that can be used to create a MediaController for this
* session
*/
public @NonNull Token getSessionToken() {
    // Created once in the constructor; safe to hand out to other processes.
    return mSessionToken;
}
/**
* Get a controller for this session. This is a convenience method to avoid
* having to cache your own controller in process.
*
* @return A controller for this session.
*/
public @NonNull MediaController getController() {
    // Created once in the constructor; a controller bound to this session.
    return mController;
}
/**
* Update the current playback state.
*
* @param state The current state of playback
*/
public void setPlaybackState(@Nullable PlaybackState state) {
    // Cache locally: Callback.onMediaButtonEvent() consults this to decide
    // which transport actions are currently valid.
    mPlaybackState = state;
    try {
        mBinder.setPlaybackState(state);
    } catch (RemoteException e) {
        Log.wtf(TAG, "Dead object in setPlaybackState.", e);
    }
}
/**
* Update the current metadata. New metadata can be created using
* {@link android.media.MediaMetadata.Builder}.
*
* @param metadata The new metadata
*/
/**
 * Update the current metadata. New metadata can be created using
 * {@link android.media.MediaMetadata.Builder}.
 *
 * @param metadata The new metadata
 */
public void setMetadata(@Nullable MediaMetadata metadata) {
    if (metadata != null) {
        // Rescale any oversized artwork bitmaps before crossing the binder.
        metadata = (new MediaMetadata.Builder(metadata, mMaxBitmapSize)).build();
    }
    try {
        mBinder.setMetadata(metadata);
    } catch (RemoteException e) {
        // Fix: message previously said "setPlaybackState" (copy-paste),
        // misattributing failures in the log.
        Log.wtf(TAG, "Dead object in setMetadata.", e);
    }
}
/**
* Update the list of items in the play queue. It is an ordered list and
* should contain the current item, and previous or upcoming items if they
* exist. Specify null if there is no current play queue.
* <p>
* The queue should be of reasonable size. If the play queue is unbounded
* within your app, it is better to send a reasonable amount in a sliding
* window instead.
*
* @param queue A list of items in the play queue.
*/
/**
 * Update the list of items in the play queue. It is an ordered list and
 * should contain the current item, and previous or upcoming items if they
 * exist. Specify null if there is no current play queue.
 * <p>
 * The queue should be of reasonable size. If the play queue is unbounded
 * within your app, it is better to send a reasonable amount in a sliding
 * window instead.
 *
 * @param queue A list of items in the play queue.
 */
public void setQueue(@Nullable List<QueueItem> queue) {
    try {
        mBinder.setQueue(queue == null ? null : new ParceledListSlice<QueueItem>(queue));
    } catch (RemoteException e) {
        // Fix: TAG was missing, so Log.wtf(String, Throwable) treated the
        // message as the log tag and the message itself was lost.
        Log.wtf(TAG, "Dead object in setQueue.", e);
    }
}
/**
* Set the title of the play queue. The UI should display this title along
* with the play queue itself.
* e.g. "Play Queue", "Now Playing", or an album name.
*
* @param title The title of the play queue.
*/
/**
 * Set the title of the play queue. The UI should display this title along
 * with the play queue itself.
 * e.g. "Play Queue", "Now Playing", or an album name.
 *
 * @param title The title of the play queue.
 */
public void setQueueTitle(@Nullable CharSequence title) {
    try {
        mBinder.setQueueTitle(title);
    } catch (RemoteException e) {
        // Fix: TAG was missing, so Log.wtf(String, Throwable) treated the
        // message as the log tag and the message itself was lost.
        Log.wtf(TAG, "Dead object in setQueueTitle.", e);
    }
}
/**
* Set the style of rating used by this session. Apps trying to set the
* rating should use this style. Must be one of the following:
* <ul>
* <li>{@link Rating#RATING_NONE}</li>
* <li>{@link Rating#RATING_3_STARS}</li>
* <li>{@link Rating#RATING_4_STARS}</li>
* <li>{@link Rating#RATING_5_STARS}</li>
* <li>{@link Rating#RATING_HEART}</li>
* <li>{@link Rating#RATING_PERCENTAGE}</li>
* <li>{@link Rating#RATING_THUMB_UP_DOWN}</li>
* </ul>
*/
public void setRatingType(int type) {
    try {
        // Must be one of the Rating.RATING_* style constants; see class doc.
        mBinder.setRatingType(type);
    } catch (RemoteException e) {
        Log.e(TAG, "Error in setRatingType.", e);
    }
}
/**
* Set some extras that can be associated with the {@link MediaSession}. No assumptions should
* be made as to how a {@link MediaController} will handle these extras.
* Keys should be fully qualified (e.g. com.example.MY_EXTRA) to avoid conflicts.
*
* @param extras The extras associated with the {@link MediaSession}.
*/
/**
 * Set some extras that can be associated with the {@link MediaSession}. No assumptions should
 * be made as to how a {@link MediaController} will handle these extras.
 * Keys should be fully qualified (e.g. com.example.MY_EXTRA) to avoid conflicts.
 *
 * @param extras The extras associated with the {@link MediaSession}.
 */
public void setExtras(@Nullable Bundle extras) {
    try {
        mBinder.setExtras(extras);
    } catch (RemoteException e) {
        // Fix: TAG was missing, so Log.wtf(String, Throwable) treated the
        // message as the log tag and the message itself was lost.
        Log.wtf(TAG, "Dead object in setExtras.", e);
    }
}
/**
* Notify the system that the remote volume changed.
*
* @param provider The provider that is handling volume changes.
* @hide
*/
public void notifyRemoteVolumeChanged(VolumeProvider provider) {
    synchronized (mLock) {
        // Ignore updates from a provider that is no longer the active one
        // (e.g. after setPlaybackToRemote/setPlaybackToLocal switched modes).
        if (provider == null || provider != mVolumeProvider) {
            Log.w(TAG, "Received update from stale volume provider");
            return;
        }
    }
    // Binder call intentionally made outside the lock.
    try {
        mBinder.setCurrentVolume(provider.getCurrentVolume());
    } catch (RemoteException e) {
        Log.e(TAG, "Error in notifyVolumeChanged", e);
    }
}
// ---------------------------------------------------------------------
// Dispatch helpers: invoked by CallbackStub (binder thread) to forward
// each incoming command onto the app's Callback handler thread.
// ---------------------------------------------------------------------

private void dispatchPlay() {
    postToCallback(CallbackMessageHandler.MSG_PLAY);
}

private void dispatchPlayFromMediaId(String mediaId, Bundle extras) {
    postToCallback(CallbackMessageHandler.MSG_PLAY_MEDIA_ID, mediaId, extras);
}

private void dispatchPlayFromSearch(String query, Bundle extras) {
    postToCallback(CallbackMessageHandler.MSG_PLAY_SEARCH, query, extras);
}

private void dispatchPlayFromUri(Uri uri, Bundle extras) {
    postToCallback(CallbackMessageHandler.MSG_PLAY_URI, uri, extras);
}

private void dispatchSkipToItem(long id) {
    postToCallback(CallbackMessageHandler.MSG_SKIP_TO_ITEM, id);
}

private void dispatchPause() {
    postToCallback(CallbackMessageHandler.MSG_PAUSE);
}

private void dispatchStop() {
    postToCallback(CallbackMessageHandler.MSG_STOP);
}

private void dispatchNext() {
    postToCallback(CallbackMessageHandler.MSG_NEXT);
}

private void dispatchPrevious() {
    postToCallback(CallbackMessageHandler.MSG_PREVIOUS);
}

private void dispatchFastForward() {
    postToCallback(CallbackMessageHandler.MSG_FAST_FORWARD);
}

private void dispatchRewind() {
    postToCallback(CallbackMessageHandler.MSG_REWIND);
}

private void dispatchSeekTo(long pos) {
    postToCallback(CallbackMessageHandler.MSG_SEEK_TO, pos);
}

private void dispatchRate(Rating rating) {
    postToCallback(CallbackMessageHandler.MSG_RATE, rating);
}

private void dispatchCustomAction(String action, Bundle args) {
    postToCallback(CallbackMessageHandler.MSG_CUSTOM_ACTION, action, args);
}

private void dispatchMediaButton(Intent mediaButtonIntent) {
    postToCallback(CallbackMessageHandler.MSG_MEDIA_BUTTON, mediaButtonIntent);
}

private void dispatchAdjustVolume(int direction) {
    postToCallback(CallbackMessageHandler.MSG_ADJUST_VOLUME, direction);
}

private void dispatchSetVolumeTo(int volume) {
    postToCallback(CallbackMessageHandler.MSG_SET_VOLUME, volume);
}

private void postToCallback(int what) {
    postToCallback(what, null);
}

// Wraps a controller command with its args and result receiver for delivery.
private void postCommand(String command, Bundle args, ResultReceiver resultCb) {
    Command cmd = new Command(command, args, resultCb);
    postToCallback(CallbackMessageHandler.MSG_COMMAND, cmd);
}

private void postToCallback(int what, Object obj) {
    postToCallback(what, obj, null);
}

// Posts only while a callback is registered; synchronized on mLock so a
// concurrent setCallback(null) cannot race with the post.
private void postToCallback(int what, Object obj, Bundle extras) {
    synchronized (mLock) {
        if (mCallback != null) {
            mCallback.post(what, obj, extras);
        }
    }
}
/**
* Return true if this is considered an active playback state.
*
* @hide
*/
/**
 * Return true if this is considered an active playback state.
 *
 * @hide
 */
public static boolean isActiveState(int state) {
    // Any state that represents ongoing or imminent playback counts as active.
    return state == PlaybackState.STATE_FAST_FORWARDING
            || state == PlaybackState.STATE_REWINDING
            || state == PlaybackState.STATE_SKIPPING_TO_PREVIOUS
            || state == PlaybackState.STATE_SKIPPING_TO_NEXT
            || state == PlaybackState.STATE_BUFFERING
            || state == PlaybackState.STATE_CONNECTING
            || state == PlaybackState.STATE_PLAYING;
}
/**
* Represents an ongoing session. This may be passed to apps by the session
* owner to allow them to create a {@link MediaController} to communicate with
* the session.
*/
/**
 * Represents an ongoing session. This may be passed to apps by the session
 * owner to allow them to create a {@link MediaController} to communicate with
 * the session.
 */
public static final class Token implements Parcelable {
    private ISessionController mBinder;

    /**
     * @hide
     */
    public Token(ISessionController binder) {
        mBinder = binder;
    }

    @Override
    public int describeContents() {
        return 0;
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeStrongBinder(mBinder.asBinder());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((mBinder == null) ? 0 : mBinder.asBinder().hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        Token other = (Token) obj;
        if (mBinder == null) {
            return other.mBinder == null;
        }
        // Fix: guard against other.mBinder being null — previously this
        // threw NullPointerException (equals must return false, not throw).
        if (other.mBinder == null) {
            return false;
        }
        // Compare the underlying binder identities.
        return mBinder.asBinder().equals(other.mBinder.asBinder());
    }

    ISessionController getBinder() {
        return mBinder;
    }

    public static final Parcelable.Creator<Token> CREATOR
            = new Parcelable.Creator<Token>() {
        @Override
        public Token createFromParcel(Parcel in) {
            return new Token(ISessionController.Stub.asInterface(in.readStrongBinder()));
        }

        @Override
        public Token[] newArray(int size) {
            return new Token[size];
        }
    };
}
/**
* Receives media buttons, transport controls, and commands from controllers
* and the system. A callback may be set using {@link #setCallback}.
*/
public abstract static class Callback {
private MediaSession mSession;
public Callback() {
}
/**
* Called when a controller has sent a command to this session.
* The owner of the session may handle custom commands but is not
* required to.
*
* @param command The command name.
* @param args Optional parameters for the command, may be null.
* @param cb A result receiver to which a result may be sent by the command, may be null.
*/
public void onCommand(@NonNull String command, @Nullable Bundle args,
@Nullable ResultReceiver cb) {
}
/**
* Called when a media button is pressed and this session has the
* highest priority or a controller sends a media button event to the
* session. The default behavior will call the relevant method if the
* action for it was set.
* <p>
* The intent will be of type {@link Intent#ACTION_MEDIA_BUTTON} with a
* KeyEvent in {@link Intent#EXTRA_KEY_EVENT}
*
* @param mediaButtonIntent an intent containing the KeyEvent as an
* extra
* @return True if the event was handled, false otherwise.
*/
public boolean onMediaButtonEvent(@NonNull Intent mediaButtonIntent) {
if (mSession != null
&& Intent.ACTION_MEDIA_BUTTON.equals(mediaButtonIntent.getAction())) {
KeyEvent ke = mediaButtonIntent.getParcelableExtra(Intent.EXTRA_KEY_EVENT);
if (ke != null && ke.getAction() == KeyEvent.ACTION_DOWN) {
PlaybackState state = mSession.mPlaybackState;
long validActions = state == null ? 0 : state.getActions();
switch (ke.getKeyCode()) {
case KeyEvent.KEYCODE_MEDIA_PLAY:
if ((validActions & PlaybackState.ACTION_PLAY) != 0) {
onPlay();
return true;
}
break;
case KeyEvent.KEYCODE_MEDIA_PAUSE:
if ((validActions & PlaybackState.ACTION_PAUSE) != 0) {
onPause();
return true;
}
break;
case KeyEvent.KEYCODE_MEDIA_NEXT:
if ((validActions & PlaybackState.ACTION_SKIP_TO_NEXT) != 0) {
onSkipToNext();
return true;
}
break;
case KeyEvent.KEYCODE_MEDIA_PREVIOUS:
if ((validActions & PlaybackState.ACTION_SKIP_TO_PREVIOUS) != 0) {
onSkipToPrevious();
return true;
}
break;
case KeyEvent.KEYCODE_MEDIA_STOP:
if ((validActions & PlaybackState.ACTION_STOP) != 0) {
onStop();
return true;
}
break;
case KeyEvent.KEYCODE_MEDIA_FAST_FORWARD:
if ((validActions & PlaybackState.ACTION_FAST_FORWARD) != 0) {
onFastForward();
return true;
}
break;
case KeyEvent.KEYCODE_MEDIA_REWIND:
if ((validActions & PlaybackState.ACTION_REWIND) != 0) {
onRewind();
return true;
}
break;
case KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE:
case KeyEvent.KEYCODE_HEADSETHOOK:
boolean isPlaying = state == null ? false
: state.getState() == PlaybackState.STATE_PLAYING;
boolean canPlay = (validActions & (PlaybackState.ACTION_PLAY_PAUSE
| PlaybackState.ACTION_PLAY)) != 0;
boolean canPause = (validActions & (PlaybackState.ACTION_PLAY_PAUSE
| PlaybackState.ACTION_PAUSE)) != 0;
if (isPlaying && canPause) {
onPause();
return true;
} else if (!isPlaying && canPlay) {
onPlay();
return true;
}
break;
}
}
}
return false;
}
/**
* Override to handle requests to begin playback.
*/
public void onPlay() {
}
/**
* Override to handle requests to play a specific mediaId that was
* provided by your app's {@link MediaBrowserService}.
*/
public void onPlayFromMediaId(String mediaId, Bundle extras) {
}
/**
* Override to handle requests to begin playback from a search query. An
* empty query indicates that the app may play any music. The
* implementation should attempt to make a smart choice about what to
* play.
*/
public void onPlayFromSearch(String query, Bundle extras) {
}
/**
* Override to handle requests to play a specific media item represented by a URI.
*/
public void onPlayFromUri(Uri uri, Bundle extras) {
}
/**
* Override to handle requests to play an item with a given id from the
* play queue.
*/
public void onSkipToQueueItem(long id) {
}
/**
* Override to handle requests to pause playback.
*/
public void onPause() {
}
/**
* Override to handle requests to skip to the next media item.
*/
public void onSkipToNext() {
}
/**
* Override to handle requests to skip to the previous media item.
*/
public void onSkipToPrevious() {
}
/**
* Override to handle requests to fast forward.
*/
public void onFastForward() {
}
/**
* Override to handle requests to rewind.
*/
public void onRewind() {
}
/**
* Override to handle requests to stop playback.
*/
public void onStop() {
}
/**
* Override to handle requests to seek to a specific position in ms.
*
* @param pos New position to move to, in milliseconds.
*/
public void onSeekTo(long pos) {
}
/**
 * Override to handle the item being rated.
 * <p>
 * The default implementation does nothing.
 *
 * @param rating The rating supplied by the controller.
 */
public void onSetRating(@NonNull Rating rating) {
}
/**
 * Called when a {@link MediaController} wants a {@link PlaybackState.CustomAction} to be
 * performed.
 * <p>
 * The default implementation does nothing.
 *
 * @param action The action that was originally sent in the
 *               {@link PlaybackState.CustomAction}.
 * @param extras Optional extras specified by the {@link MediaController}.
 */
public void onCustomAction(@NonNull String action, @Nullable Bundle extras) {
}
}
/**
 * Binder stub that forwards remote controller callbacks to the owning
 * {@link MediaSession}. The session is held via a {@link WeakReference} so
 * this stub cannot keep the session alive by itself; every entry point
 * silently drops the call once the session has been collected.
 *
 * @hide
 */
public static class CallbackStub extends ISessionCallback.Stub {
    private final WeakReference<MediaSession> mSessionRef;

    public CallbackStub(MediaSession session) {
        mSessionRef = new WeakReference<MediaSession>(session);
    }

    /** Returns the session if it is still alive, otherwise {@code null}. */
    private MediaSession session() {
        return mSessionRef.get();
    }

    @Override
    public void onCommand(String command, Bundle args, ResultReceiver cb) {
        MediaSession s = session();
        if (s != null) {
            s.postCommand(command, args, cb);
        }
    }

    @Override
    public void onMediaButton(Intent mediaButtonIntent, int sequenceNumber,
            ResultReceiver cb) {
        // Always acknowledge the button event via the receiver, even if the
        // session is gone or dispatch throws.
        try {
            MediaSession s = session();
            if (s != null) {
                s.dispatchMediaButton(mediaButtonIntent);
            }
        } finally {
            if (cb != null) {
                cb.send(sequenceNumber, null);
            }
        }
    }

    @Override
    public void onPlay() {
        MediaSession s = session();
        if (s != null) {
            s.dispatchPlay();
        }
    }

    @Override
    public void onPlayFromMediaId(String mediaId, Bundle extras) {
        MediaSession s = session();
        if (s != null) {
            s.dispatchPlayFromMediaId(mediaId, extras);
        }
    }

    @Override
    public void onPlayFromSearch(String query, Bundle extras) {
        MediaSession s = session();
        if (s != null) {
            s.dispatchPlayFromSearch(query, extras);
        }
    }

    @Override
    public void onPlayFromUri(Uri uri, Bundle extras) {
        MediaSession s = session();
        if (s != null) {
            s.dispatchPlayFromUri(uri, extras);
        }
    }

    @Override
    public void onSkipToTrack(long id) {
        MediaSession s = session();
        if (s != null) {
            s.dispatchSkipToItem(id);
        }
    }

    @Override
    public void onPause() {
        MediaSession s = session();
        if (s != null) {
            s.dispatchPause();
        }
    }

    @Override
    public void onStop() {
        MediaSession s = session();
        if (s != null) {
            s.dispatchStop();
        }
    }

    @Override
    public void onNext() {
        MediaSession s = session();
        if (s != null) {
            s.dispatchNext();
        }
    }

    @Override
    public void onPrevious() {
        MediaSession s = session();
        if (s != null) {
            s.dispatchPrevious();
        }
    }

    @Override
    public void onFastForward() {
        MediaSession s = session();
        if (s != null) {
            s.dispatchFastForward();
        }
    }

    @Override
    public void onRewind() {
        MediaSession s = session();
        if (s != null) {
            s.dispatchRewind();
        }
    }

    @Override
    public void onSeekTo(long pos) {
        MediaSession s = session();
        if (s != null) {
            s.dispatchSeekTo(pos);
        }
    }

    @Override
    public void onRate(Rating rating) {
        MediaSession s = session();
        if (s != null) {
            s.dispatchRate(rating);
        }
    }

    @Override
    public void onCustomAction(String action, Bundle args) {
        MediaSession s = session();
        if (s != null) {
            s.dispatchCustomAction(action, args);
        }
    }

    @Override
    public void onAdjustVolume(int direction) {
        MediaSession s = session();
        if (s != null) {
            s.dispatchAdjustVolume(direction);
        }
    }

    @Override
    public void onSetVolumeTo(int value) {
        MediaSession s = session();
        if (s != null) {
            s.dispatchSetVolumeTo(value);
        }
    }
}
/**
 * A single item that is part of the play queue. It contains a description
 * of the item and its id in the queue.
 */
public static final class QueueItem implements Parcelable {
    /**
     * This id is reserved. No items can be explicitly assigned this id.
     */
    public static final int UNKNOWN_ID = -1;

    // Parcel order is fixed: description first, then the id. createFromParcel
    // and writeToParcel must stay in sync with each other.
    private final MediaDescription mDescription;
    private final long mId;

    /**
     * Create a new {@link MediaSession.QueueItem}.
     *
     * @param description The {@link MediaDescription} for this item.
     * @param id An identifier for this item. It must be unique within the
     *            play queue and cannot be {@link #UNKNOWN_ID}.
     * @throws IllegalArgumentException if description is null or id equals
     *            {@link #UNKNOWN_ID}.
     */
    public QueueItem(MediaDescription description, long id) {
        if (description == null) {
            throw new IllegalArgumentException("Description cannot be null.");
        }
        if (id == UNKNOWN_ID) {
            throw new IllegalArgumentException("Id cannot be QueueItem.UNKNOWN_ID");
        }
        mDescription = description;
        mId = id;
    }

    // Reads fields in the same order writeToParcel() wrote them.
    private QueueItem(Parcel in) {
        mDescription = MediaDescription.CREATOR.createFromParcel(in);
        mId = in.readLong();
    }

    /**
     * Get the description for this item.
     */
    public MediaDescription getDescription() {
        return mDescription;
    }

    /**
     * Get the queue id for this item.
     */
    public long getQueueId() {
        return mId;
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        mDescription.writeToParcel(dest, flags);
        dest.writeLong(mId);
    }

    @Override
    public int describeContents() {
        return 0;
    }

    public static final Creator<MediaSession.QueueItem> CREATOR = new Creator<MediaSession.QueueItem>() {
        @Override
        public MediaSession.QueueItem createFromParcel(Parcel p) {
            return new MediaSession.QueueItem(p);
        }

        @Override
        public MediaSession.QueueItem[] newArray(int size) {
            return new MediaSession.QueueItem[size];
        }
    };

    @Override
    public String toString() {
        return "MediaSession.QueueItem {" +
                "Description=" + mDescription +
                ", Id=" + mId + " }";
    }
}
/**
 * Immutable holder pairing a controller command name with its extras and
 * the ResultReceiver to report back through. Used as a single message
 * payload when posting MSG_COMMAND to the callback handler.
 */
private static final class Command {
    public final String command;
    public final Bundle extras;
    public final ResultReceiver stub;

    public Command(String command, Bundle extras, ResultReceiver stub) {
        this.command = command;
        this.extras = extras;
        this.stub = stub;
    }
}
/**
 * Handler that runs {@link MediaSession.Callback} methods on the callback
 * looper. Controller requests are posted here as messages and dispatched to
 * the matching callback method; volume messages are routed to the session's
 * current VolumeProvider instead of the callback.
 */
private class CallbackMessageHandler extends Handler {
    // Message codes: one per callback entry point.
    private static final int MSG_PLAY = 1;
    private static final int MSG_PLAY_MEDIA_ID = 2;
    private static final int MSG_PLAY_SEARCH = 3;
    private static final int MSG_SKIP_TO_ITEM = 4;
    private static final int MSG_PAUSE = 5;
    private static final int MSG_STOP = 6;
    private static final int MSG_NEXT = 7;
    private static final int MSG_PREVIOUS = 8;
    private static final int MSG_FAST_FORWARD = 9;
    private static final int MSG_REWIND = 10;
    private static final int MSG_SEEK_TO = 11;
    private static final int MSG_RATE = 12;
    private static final int MSG_CUSTOM_ACTION = 13;
    private static final int MSG_MEDIA_BUTTON = 14;
    private static final int MSG_COMMAND = 15;
    private static final int MSG_ADJUST_VOLUME = 16;
    private static final int MSG_SET_VOLUME = 17;
    private static final int MSG_PLAY_URI = 18;

    // Target for all dispatched callback methods.
    private MediaSession.Callback mCallback;

    public CallbackMessageHandler(Looper looper, MediaSession.Callback callback) {
        // NOTE(review): the third constructor argument presumably marks
        // messages asynchronous (Handler(Looper, Callback, boolean)) —
        // confirm against the platform Handler API.
        super(looper, null, true);
        mCallback = callback;
    }

    /** Post a message carrying an object payload plus a data Bundle. */
    public void post(int what, Object obj, Bundle bundle) {
        Message msg = obtainMessage(what, obj);
        msg.setData(bundle);
        msg.sendToTarget();
    }

    /** Post a message carrying an object payload only. */
    public void post(int what, Object obj) {
        obtainMessage(what, obj).sendToTarget();
    }

    /** Post a message with no payload. */
    public void post(int what) {
        post(what, null);
    }

    /** Post a message carrying an object payload and an int argument. */
    public void post(int what, Object obj, int arg1) {
        obtainMessage(what, arg1, 0, obj).sendToTarget();
    }

    @Override
    public void handleMessage(Message msg) {
        VolumeProvider vp;
        switch (msg.what) {
            case MSG_PLAY:
                mCallback.onPlay();
                break;
            case MSG_PLAY_MEDIA_ID:
                mCallback.onPlayFromMediaId((String) msg.obj, msg.getData());
                break;
            case MSG_PLAY_SEARCH:
                mCallback.onPlayFromSearch((String) msg.obj, msg.getData());
                break;
            case MSG_PLAY_URI:
                mCallback.onPlayFromUri((Uri) msg.obj, msg.getData());
                break;
            case MSG_SKIP_TO_ITEM:
                mCallback.onSkipToQueueItem((Long) msg.obj);
                break;
            case MSG_PAUSE:
                mCallback.onPause();
                break;
            case MSG_STOP:
                mCallback.onStop();
                break;
            case MSG_NEXT:
                mCallback.onSkipToNext();
                break;
            case MSG_PREVIOUS:
                mCallback.onSkipToPrevious();
                break;
            case MSG_FAST_FORWARD:
                mCallback.onFastForward();
                break;
            case MSG_REWIND:
                mCallback.onRewind();
                break;
            case MSG_SEEK_TO:
                mCallback.onSeekTo((Long) msg.obj);
                break;
            case MSG_RATE:
                mCallback.onSetRating((Rating) msg.obj);
                break;
            case MSG_CUSTOM_ACTION:
                mCallback.onCustomAction((String) msg.obj, msg.getData());
                break;
            case MSG_MEDIA_BUTTON:
                mCallback.onMediaButtonEvent((Intent) msg.obj);
                break;
            case MSG_COMMAND:
                Command cmd = (Command) msg.obj;
                mCallback.onCommand(cmd.command, cmd.extras, cmd.stub);
                break;
            case MSG_ADJUST_VOLUME:
                // Snapshot the provider under mLock, then call outside the lock.
                synchronized (mLock) {
                    vp = mVolumeProvider;
                }
                if (vp != null) {
                    vp.onAdjustVolume((int) msg.obj);
                }
                break;
            case MSG_SET_VOLUME:
                // Snapshot the provider under mLock, then call outside the lock.
                synchronized (mLock) {
                    vp = mVolumeProvider;
                }
                if (vp != null) {
                    vp.onSetVolumeTo((int) msg.obj);
                }
                break;
        }
    }
}
}
| |
/**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/site-manage/tags/sakai-10.6/site-manage-tool/tool/src/java/org/sakaiproject/site/tool/SiteBrowserAction.java $
* $Id: SiteBrowserAction.java 133304 2014-01-15 18:23:15Z holladay@longsight.com $
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008, 2009 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.site.tool;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.sakaiproject.announcement.cover.AnnouncementService;
import org.sakaiproject.authz.cover.SecurityService;
import org.sakaiproject.cheftool.Context;
import org.sakaiproject.cheftool.JetspeedRunData;
import org.sakaiproject.cheftool.PagedResourceActionII;
import org.sakaiproject.cheftool.RunData;
import org.sakaiproject.cheftool.VelocityPortlet;
import org.sakaiproject.cheftool.api.Menu;
import org.sakaiproject.cheftool.menu.MenuImpl;
import org.sakaiproject.component.cover.ComponentManager;
import org.sakaiproject.component.cover.ServerConfigurationService;
import org.sakaiproject.content.api.ContentHostingService;
import org.sakaiproject.content.api.FilePickerHelper;
import org.sakaiproject.content.cover.ContentTypeImageService;
import org.sakaiproject.coursemanagement.api.AcademicSession;
import org.sakaiproject.entity.api.Reference;
import org.sakaiproject.entity.cover.EntityManager;
import org.sakaiproject.event.api.SessionState;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.exception.PermissionException;
import org.sakaiproject.javax.PagingPosition;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.api.SiteService.SelectionType;
import org.sakaiproject.site.cover.SiteService;
import org.sakaiproject.site.util.SiteTextEditUtil;
import org.sakaiproject.sitemanage.api.SiteHelper;
import org.sakaiproject.tool.api.Tool;
import org.sakaiproject.tool.api.ToolException;
import org.sakaiproject.tool.api.ToolSession;
import org.sakaiproject.tool.cover.SessionManager;
import org.sakaiproject.tool.cover.ToolManager;
import org.sakaiproject.util.ResourceLoader;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.velocity.tools.generic.SortTool;
/**
* <p>
* SiteBrowserAction is the Sakai site browser, showing a searchable list of the defined sites, and details including public resources of each when selected.
* </p>
*/
public class SiteBrowserAction extends PagedResourceActionII implements SiteHelper
{
/** State key holding a user-selected page-size override for the list view. */
private static final String INTER_SIZE = "inter_size";

// Course management service, resolved via the component manager; every use
// below is null-guarded, so a missing CM provider is tolerated.
private org.sakaiproject.coursemanagement.api.CourseManagementService cms = (org.sakaiproject.coursemanagement.api.CourseManagementService) ComponentManager
        .get(org.sakaiproject.coursemanagement.api.CourseManagementService.class);

private ContentHostingService contentHostingService;

private static Log log = LogFactory.getLog(SiteBrowserAction.class);

/** Localized strings for the site browser tool. */
private static ResourceLoader rb = new ResourceLoader("sitebrowser");

// Prefix applied to tool-state attribute keys to avoid collisions.
private static final String PREFIX = "sitebrowser.";

/** State key for the tool's current view mode. */
private static final String MODE = PREFIX+ "mode";

// Sentinel values meaning "no filter" in the search form.
private final static String SITE_TYPE_ANY = "Any";
private final static String SITE_TERM_ANY = "Any";

// Search criteria held in tool state between requests.
private final static String STATE_TERM_SELECTION = PREFIX+ "termSelection";
private final static String STATE_SEARCH_SITE_TYPE = PREFIX+ "siteType";
private final static String STATE_SEARCH_LIST = PREFIX+ "searchList";
private final static String STATE_PROP_SEARCH_MAP = PREFIX+ "propertyCriteriaMap";

// View mode identifiers.
private final static String SIMPLE_SEARCH_VIEW = "simpleSearch";
private final static String LIST_VIEW = "list";

// for the site with extra search criteria
private final static String SEARCH_TERM_SITE_TYPE = "termSearchSiteType";
private final static String SEARCH_TERM_PROP = "termProp";
private static final String NO_SHOW_SEARCH_TYPE = "noshow_search_sitetype";

/** for navigating between sites in site list */
private static final String STATE_SITES = PREFIX+ "state_sites";
private static final String STATE_PREV_SITE = PREFIX+ "state_prev_site";
private static final String STATE_NEXT_SITE = PREFIX+ "state_next_site";

// Marker mode meaning the helper has finished and should redirect.
private static final String STATE_HELPER_DONE = PREFIX+ "helperDone";

// sakai.properties keys controlling academic-session sort order.
private final static String SORT_KEY_SESSION = "worksitesetup.sort.key.session";
private final static String SORT_ORDER_SESSION = "worksitesetup.sort.order.session";

/** Resolve the content hosting service once at construction time. */
public SiteBrowserAction() {
    contentHostingService = (ContentHostingService) ComponentManager.get(ContentHostingService.class.getName());
}
/**
 * {@inheritDoc}
 */
protected List readResourcesPage(SessionState state, int first, int last)
{
    // Optional free-text filter entered by the user.
    String searchFilter = StringUtils.trimToNull((String) state.getAttribute(STATE_SEARCH));
    // The helper caller decides which site visibility/permission to browse.
    org.sakaiproject.site.api.SiteService.SelectionType selectionType =
            (org.sakaiproject.site.api.SiteService.SelectionType) state.getAttribute(SiteHelper.SITE_PICKER_PERMISSION);
    // Fetch one page of matching sites, sorted by title ascending.
    return SiteService.getSites(selectionType,
            state.getAttribute(STATE_SEARCH_SITE_TYPE), searchFilter,
            (HashMap) state.getAttribute(STATE_PROP_SEARCH_MAP),
            org.sakaiproject.site.api.SiteService.SortType.TITLE_ASC,
            new PagingPosition(first, last));
}
/**
 * {@inheritDoc}
 */
protected int sizeResources(SessionState state)
{
    // Mirrors readResourcesPage(): same filter and selection type, count only.
    String searchFilter = StringUtils.trimToNull((String) state.getAttribute(STATE_SEARCH));
    org.sakaiproject.site.api.SiteService.SelectionType selectionType =
            (org.sakaiproject.site.api.SiteService.SelectionType) state.getAttribute(SiteHelper.SITE_PICKER_PERMISSION);
    return SiteService.countSites(selectionType,
            state.getAttribute(STATE_SEARCH_SITE_TYPE), searchFilter,
            (HashMap) state.getAttribute(STATE_PROP_SEARCH_MAP));
}
/**
 * Populate the state object, if needed: page size, term-search
 * configuration, hidden site types, and the default picker permission.
 */
protected void initState(SessionState state, VelocityPortlet portlet, JetspeedRunData rundata)
{
    super.initState(state, portlet, rundata);
    state.setAttribute(STATE_PAGESIZE, Integer.valueOf(DEFAULT_PAGE_SIZE));
    // if site type which requires term search exists
    // get all term-search related data from configuration,
    String termSearchSiteType = ServerConfigurationService.getString("sitebrowser.termsearch.type");
    if (termSearchSiteType != null)
    {
        state.setAttribute(SEARCH_TERM_SITE_TYPE, termSearchSiteType);
        String termSearchProperty = ServerConfigurationService.getString("sitebrowser.termsearch.property");
        state.setAttribute(SEARCH_TERM_PROP, termSearchProperty);
    }
    // Site types configured to be hidden from the search form (SAK-19287).
    String[] noSearchSiteTypes = ServerConfigurationService.getStrings("sitesearch.noshow.sitetype");
    if (noSearchSiteTypes != null)
    {
        state.setAttribute(NO_SHOW_SEARCH_TYPE, noSearchSiteTypes);
    }
    // Make sure we have a permission to be looking for.
    if (!(state.getAttribute(SiteHelper.SITE_PICKER_PERMISSION) instanceof org.sakaiproject.site.api.SiteService.SelectionType))
    {
        // The default is pubview.
        state.setAttribute(SiteHelper.SITE_PICKER_PERMISSION, org.sakaiproject.site.api.SiteService.SelectionType.PUBVIEW);
    }
    // setup the observer to notify our main panel
    /*
     * if (state.getAttribute(STATE_OBSERVER) == null) { // the delivery location for this tool String deliveryId = clientWindowId(state, portlet.getID()); // the html element to update on delivery String elementId =
     * mainPanelUpdateId(portlet.getID()); // the event resource reference pattern to watch for String pattern = SiteService.siteReference(""); state.setAttribute(STATE_OBSERVER, new EventObservingCourier(deliveryId, elementId, pattern)); } // make
     * sure the observer is in sync with state updateObservationOfChannel(state, portlet.getID());
     */
} // initState
/**
 * Setup our observer to be watching for change events for our channel.
 * <p>
 * NOTE(review): the body is fully commented out, so this method is
 * currently a no-op; it is kept for the historical courier-based refresh
 * wiring referenced from initState.
 *
 * @param peid
 *        The portlet id.
 */
private void updateObservationOfChannel(SessionState state, String peid)
{
    /*
     * EventObservingCourier observer = (EventObservingCourier) state.getAttribute(STATE_OBSERVER); // the delivery location for this tool String deliveryId = clientWindowId(state, peid); observer.setDeliveryId(deliveryId);
     */
} // updateObservationOfChannel
/**
 * Build the context for the main panel: dispatch on the tool's current
 * mode to the matching view builder, defaulting to the simple search view.
 *
 * @return the template name to render (tool template prefix + suffix).
 */
public String buildMainPanelContext(VelocityPortlet portlet, Context context, RunData rundata, SessionState state)
{
    context.put("tlang", rb);
    String template = null;
    // check mode and dispatch
    String mode = (String) state.getAttribute(MODE);
    if ((mode == null) || mode.equals(SIMPLE_SEARCH_VIEW))
    {
        template = buildSimpleSearchContext(state, context);
    }
    else if (mode.equals(LIST_VIEW))
    {
        template = buildListContext(state, context);
    }
    else if ("visit".equals(mode))
    {
        template = buildVisitContext(state, context);
    }
    // SAK-24423 - joinable site settings - join from site browser
    else if( JoinableSiteSettings.SITE_BROWSER_JOIN_MODE.equalsIgnoreCase( mode ) )
    {
        if( JoinableSiteSettings.isJoinFromSiteBrowserEnabled() )
        {
            template = JoinableSiteSettings.buildJoinContextForSiteBrowser( state, context, rb );
        }
        else
        {
            // BUGFIX: was Log.warn("chef", ...) — a static call on the
            // commons-logging Log interface, which does not compile; use
            // the class logger instead.
            log.warn("SiteBrowserAction: mode = " + mode + ", but site browser join is disabled globally");
            template = buildListContext( state, context );
        }
    }
    else
    {
        // Unknown mode: log it and fall back to the list view.
        log.warn("SiteBrowserAction: mode: " + mode);
        template = buildListContext(state, context);
    }
    return (String) getContext(rundata).get("template") + template;
} // buildMainPanelContext
/**
 * Build the context for the main list mode: page size, the current page of
 * sites, paging positions, the menu bar, and (when available) the map of
 * academic term eids to titles.
 */
private String buildListContext(SessionState state, Context context)
{
    // put the service in the context (used for allow update calls on each site)
    context.put("service", SiteService.getInstance());
    // helperMode is true when a helper caller supplied a done-URL.
    context.put("helperMode", Boolean.valueOf(state.getAttribute(Tool.HELPER_DONE_URL) != null));
    // echo the current search criteria back to the view
    context.put("termProp", (String) state.getAttribute(SEARCH_TERM_PROP));
    context.put("searchText", (String) state.getAttribute(STATE_SEARCH));
    context.put("siteType", (String) state.getAttribute(STATE_SEARCH_SITE_TYPE));
    context.put("termSelection", (String) state.getAttribute(STATE_TERM_SELECTION));
    context.put("siteBrowserTextEdit", new SiteTextEditUtil());
    // Use the user-selected page size when present, else the default.
    // String newPageSize = state.getAttribute(STATE_PAGESIZE).toString();
    Integer newPageSize = (Integer) state.getAttribute(INTER_SIZE);
    if (newPageSize != null)
    {
        context.put("pagesize", newPageSize);
        state.setAttribute(STATE_PAGESIZE, newPageSize);
    }
    else
    {
        state.setAttribute(STATE_PAGESIZE, Integer.valueOf(DEFAULT_PAGE_SIZE));
        context.put("pagesize", Integer.valueOf(DEFAULT_PAGE_SIZE));
    }
    // prepare the paging of realms
    List sites = prepPage(state);
    // cache the page so buildVisitContext can compute prev/next navigation
    state.setAttribute(STATE_SITES, sites);
    context.put("sites", sites);
    // SAK-24423 - joinable site settings - put the necessary info into the context for the list interface
    JoinableSiteSettings.putSiteMapInContextForSiteBrowser( context, sites );
    JoinableSiteSettings.putCurrentUserInContextForSiteBrowser( context );
    JoinableSiteSettings.putIsSiteBrowserJoinEnabledInContext( context );
    if (state.getAttribute(STATE_NUM_MESSAGES) != null)
        context.put("allMsgNumber", state.getAttribute(STATE_NUM_MESSAGES).toString());
    // find the position of the message that is the top first on the page
    if ((state.getAttribute(STATE_TOP_PAGE_MESSAGE) != null) && (state.getAttribute(STATE_PAGESIZE) != null))
    {
        int topMsgPos = ((Integer) state.getAttribute(STATE_TOP_PAGE_MESSAGE)).intValue() + 1;
        context.put("topMsgPos", Integer.toString(topMsgPos));
        int btmMsgPos = topMsgPos + ((Integer) state.getAttribute(STATE_PAGESIZE)).intValue() - 1;
        if (state.getAttribute(STATE_NUM_MESSAGES) != null)
        {
            int allMsgNumber = ((Integer) state.getAttribute(STATE_NUM_MESSAGES)).intValue();
            // clamp the bottom position to the total count
            if (btmMsgPos > allMsgNumber) btmMsgPos = allMsgNumber;
        }
        context.put("btmMsgPos", Integer.toString(btmMsgPos));
    }
    // build the menu
    Menu bar = new MenuImpl();
    // add the search commands
    // bar.add( new MenuField(FORM_SEARCH, "toolbar", "doSearch", (String) state.getAttribute(STATE_SEARCH)));
    // bar.add( new MenuEntry("Search", null, true, MenuItem.CHECKED_NA, "doSearch", "toolbar"));
    // add the refresh commands
    // %%% we want manual only
    addRefreshMenus(bar, state);
    if (bar.size() > 0)
    {
        context.put(Menu.CONTEXT_MENU, bar);
    }
    // previous/next page button availability
    boolean goPPButton = state.getAttribute(STATE_PREV_PAGE_EXISTS) != null;
    context.put("goPPButton", Boolean.toString(goPPButton));
    boolean goNPButton = state.getAttribute(STATE_NEXT_PAGE_EXISTS) != null;
    context.put("goNPButton", Boolean.toString(goNPButton));
    // }
    // inform the observing courier that we just updated the page...
    // if there are pending requests to do so they can be cleared
    // justDelivered(state);
    // expose a term eid -> title map when course management is available
    if (cms != null)
    {
        Map<String, String> smap =new HashMap<String, String>();
        Collection<AcademicSession> sessions = cms.getAcademicSessions();
        for (AcademicSession s: sessions) {
            smap.put(s.getEid(),s.getTitle());
        }
        context.put("termsmap", smap );
    }
    return "_list";
} // buildListContext
/**
 * Build the context for the simple search mode: the selectable site types
 * (minus hidden ones and "myworkspace") and, when configured, the list of
 * academic terms.
 */
private String buildSimpleSearchContext(SessionState state, Context context)
{
    // BUGFIX: operate on a local copy of the service's site type list. The
    // previous code removed entries directly from the List returned by
    // SiteService.getSiteTypes(), mutating a collection owned by the service.
    List newTypes = new Vector(SiteService.getSiteTypes());
    if (state.getAttribute(NO_SHOW_SEARCH_TYPE) != null)
    {
        // SAK-19287: drop any site types configured as hidden from search.
        String[] noTypes = (String[]) state.getAttribute(NO_SHOW_SEARCH_TYPE);
        for (int i = 0; i < noTypes.length; i++) {
            if (noTypes[i] != null && noTypes[i].length() > 0) {
                newTypes.remove(noTypes[i].trim());
            }
        }
    }
    // remove the "myworkspace" type
    for (Iterator i = newTypes.iterator(); i.hasNext();)
    {
        String t = (String) i.next();
        if ("myworkspace".equalsIgnoreCase(t))
        {
            i.remove();
        }
    }
    context.put("siteTypes", newTypes);
    // only offer the term dropdown when a term-search site type is configured
    String termSearchSiteType = (String) state.getAttribute(SEARCH_TERM_SITE_TYPE);
    if (termSearchSiteType != null)
    {
        context.put("termSearchSiteType", termSearchSiteType);
        if (cms != null)
        {
            context.put("terms", sortAcademicSessions( cms.getAcademicSessions()));
        }
    }
    return "_simpleSearch";
} // buildSimpleSearchContext
/**
 * Sort academic sessions using the key/order overrides from
 * sakai.properties when present, otherwise the default eid/title order.
 *
 * @param sessions the academic sessions to sort
 * @return the sorted collection (empty when sessions is null or empty)
 */
private Collection sortAcademicSessions(Collection<AcademicSession> sessions) {
    // Optional sort configuration; sortCmObject falls back to eid/title.
    return sortCmObject(sessions,
            ServerConfigurationService.getStrings(SORT_KEY_SESSION),
            ServerConfigurationService.getStrings(SORT_ORDER_SESSION));
} // sortAcademicSessions
/**
 * Custom sort for course-management collections using bean properties.
 * Each key is paired with its order as "key:order" for the velocity
 * SortTool; defaults to eid then title when no keys/orders are given.
 *
 * @param collection a collection to be sorted
 * @param keys properties to sort on
 * @param orders how to sort each property (asc, dsc)
 * @return Collection the sorted collection (empty for null/empty input)
 */
private Collection sortCmObject(Collection collection, String[] keys, String[] orders) {
    // Guard clause: nothing to sort.
    if (collection == null || collection.isEmpty()) {
        return Collections.emptyList();
    }
    List sortProps = new ArrayList();
    boolean useDefaults = (keys == null || orders == null
            || keys.length == 0 || orders.length == 0);
    if (useDefaults) {
        // No keys are specified, so use the default sort order
        sortProps.add("eid");
        sortProps.add("title");
    } else {
        // Pair each key with its order; ignore unmatched extras.
        int count = Math.min(keys.length, orders.length);
        for (int i = 0; i < count; i++) {
            sortProps.add(keys[i] + ":" + orders[i]);
        }
    }
    return new SortTool().sort(collection, sortProps);
} // sortCmObject
/**
 * Build the context for the visit site mode: load the selected site,
 * compute prev/next navigation within the cached site list, and gather the
 * site's public announcements, syllabus, and resources.
 */
private String buildVisitContext(SessionState state, Context context)
{
    List sites = (List) state.getAttribute(STATE_SITES);
    String siteId = (String) state.getAttribute("siteId");
    try
    {
        Site site = SiteService.getSite(siteId);
        if (sites != null)
        {
            // locate the current site's position in the paged list
            int pos = -1;
            for (int index = 0; index < sites.size() && pos == -1; index++)
            {
                if (((Site) sites.get(index)).getId().equals(siteId))
                {
                    pos = index;
                }
            }
            // has any previous site in the list?
            if (pos > 0)
            {
                state.setAttribute(STATE_PREV_SITE, sites.get(pos - 1));
            }
            else
            {
                state.removeAttribute(STATE_PREV_SITE);
            }
            // has any next site in the list?
            if (pos < sites.size() - 1)
            {
                state.setAttribute(STATE_NEXT_SITE, sites.get(pos + 1));
            }
            else
            {
                state.removeAttribute(STATE_NEXT_SITE);
            }
        }
        if (state.getAttribute(STATE_PREV_SITE) != null)
        {
            context.put("prevSite", state.getAttribute(STATE_PREV_SITE));
        }
        if (state.getAttribute(STATE_NEXT_SITE) != null)
        {
            context.put("nextSite", state.getAttribute(STATE_NEXT_SITE));
        }
        context.put("site", site);
        // get the public announcements
        String anncRef = AnnouncementService.channelReference(site.getId(), SiteService.MAIN_CONTAINER);
        List announcements = null;
        try
        {
            announcements = AnnouncementService.getMessages(anncRef, null, 0, true, false, true);
        }
        catch (PermissionException e)
        {
            // no access: render an empty announcement list rather than failing
            announcements = new Vector();
        }
        context.put("announcements", announcements);
        // get the public syllabus; accessed reflectively so this tool has no
        // hard dependency on the syllabus service
        try
        {
            Object syllabusService = ComponentManager.get("org.sakaiproject.api.app.syllabus.SyllabusService");
            Class syllabusServiceClass = syllabusService.getClass();
            Class[] paramsClasses = new Class[1];
            paramsClasses[0] = java.lang.String.class;
            Method getMessages = syllabusServiceClass.getMethod("getMessages", paramsClasses);
            String paramSiteId = site.getId();
            List syllabusList = (ArrayList) getMessages.invoke(syllabusService, new Object[] { paramSiteId });
            context.put("syllabus", syllabusList);
        }
        catch (Exception reflectionEx)
        {
            // BUGFIX: was a static call on the commons-logging Log interface
            // with the exception string-concatenated into the message (cause
            // lost) plus printStackTrace(); log through the class logger with
            // the exception as the cause instead.
            log.error("Reflection exceptions in SiteBrowserAction for getting public syllabus", reflectionEx);
        }
        // get the public resources
        List resources = contentHostingService.getAllResources(contentHostingService.getSiteCollection(site.getId()));
        context.put("resources", resources);
        // the height for the info frame
        context.put("height", "300px");
        // the url for info
        String url = site.getInfoUrl();
        if (url != null)
        {
            url = url.trim();
            url = convertReferenceUrl(url);
            context.put("infoUrl", url);
        }
        context.put("contentTypeImageService", ContentTypeImageService.getInstance());
        // SAK-24423 - joinable site settings - put info into the context for the visit UI
        JoinableSiteSettings.putIsSiteBrowserJoinEnabledInContext( context );
        JoinableSiteSettings.putIsCurrentUserAlreadyMemberInContextForSiteBrowser( context, siteId );
        JoinableSiteSettings.putIsSiteExcludedFromPublic( context, siteId );
    }
    catch (IdUnusedException err)
    {
        // Previously swallowed silently; record it so a vanished site id is diagnosable.
        log.warn("SiteBrowserAction.buildVisitContext: site not found: " + siteId);
    }
    return "_visit";
} // buildVisitContext
/**
 * Navigate directly to another site while in visit mode; falls back to the
 * list view when no site id was supplied.
 */
public void doNavigate_to_site(RunData data, Context context)
{
    JetspeedRunData jData = (JetspeedRunData) data;
    SessionState state = jData.getPortletSessionState(jData.getJs_peid());
    String targetSiteId = StringUtils.trimToNull(data.getParameters().getString("newSiteId"));
    if (targetSiteId == null)
    {
        // Nothing selected: return to the list view.
        doBack(data, context);
        return;
    }
    state.setAttribute("siteId", targetSiteId);
} // doNavigate_to_site
/**
 * Handle a request to visit a site: validate the site exists, then switch
 * the tool into visit mode; alert the user when the site cannot be found.
 */
public void doVisit(RunData data, Context context)
{
    SessionState state = ((JetspeedRunData) data).getPortletSessionState(((JetspeedRunData) data).getJs_peid());
    String id = data.getParameters().getString("id");
    // get the site
    try
    {
        // Validate the site exists before switching modes; the return value
        // itself is not needed here (the unused local was removed).
        SiteService.getSite(id);
        state.setAttribute("siteId", id);
        state.setAttribute(MODE, "visit");
        // disable auto-updates while in view mode
        // ((EventObservingCourier) state.getAttribute(STATE_OBSERVER)).disable();
    }
    catch (IdUnusedException e)
    {
        // BUGFIX: was Log.warn("chef", ...) — a static call on the
        // commons-logging Log interface, which does not compile — and the
        // message named the wrong method ("doEdit").
        log.warn("SiteBrowserAction.doVisit: site not found: " + id);
        addAlert(state, rb.getFormattedMessage("notfound", new Object[]{id}));
        state.removeAttribute(MODE);
        // make sure auto-updates are enabled
        // enableObserver(state);
    }
} // doVisit
/**
 * Handle a request to join a site.
 *
 * @author bjones86
 *
 * @param data
 *        the state to get the settings from
 * @param context
 *        the object to put the settings into
 */
public void doJoin( RunData data, Context context )
{
    JetspeedRunData jData = (JetspeedRunData) data;
    SessionState state = jData.getPortletSessionState( jData.getJs_peid() );
    // Delegate the join; a non-empty return value is an alert for the user.
    String alertMessage = JoinableSiteSettings.doJoinForSiteBrowser( state, rb, data.getParameters().getString( "id" ) );
    if( StringUtils.isNotEmpty( alertMessage ) )
    {
        addAlert( state, alertMessage );
    }
} // doJoin
/**
 * Handle a request to return to the list: clear the visited site id and
 * restore the list view mode.
 */
public void doBack(RunData data, Context context)
{
    JetspeedRunData jData = (JetspeedRunData) data;
    SessionState toolState = jData.getPortletSessionState(jData.getJs_peid());
    toolState.removeAttribute("siteId");
    toolState.setAttribute(MODE, LIST_VIEW);
} // doBack
/**
 * Handle a request to go to Simple Search Mode.
 */
public void doShow_simple_search(RunData data, Context context)
{
    JetspeedRunData jData = (JetspeedRunData) data;
    SessionState toolState = jData.getPortletSessionState(jData.getJs_peid());
    toolState.setAttribute(MODE, SIMPLE_SEARCH_VIEW);
} // doShow_simple_search
/**
* Handle a request to go to Advanced Search Mode.
*/
/*
* public void doShowadvsearch(RunData data, Context context) { SessionState state = ((JetspeedRunData)data).getPortletSessionState(((JetspeedRunData)data).getJs_peid()); state.setAttribute("mode", ADV_SEARCH_VIEW); } // doShowadvsearch
*/
/**
 * Handle a request to search: read the site type and (optionally) the
 * academic term from the form, store the resulting criteria in state, and
 * switch to the list view with the default page size.
 */
public void doSearch(RunData data, Context context)
{
    super.doSearch(data, context);
    SessionState state = ((JetspeedRunData) data).getPortletSessionState(((JetspeedRunData) data).getJs_peid());
    String mode = (String) state.getAttribute(MODE);
    state.setAttribute("searchMode", mode);
    // reset any previous term criteria before reading the new form values
    state.removeAttribute(STATE_PROP_SEARCH_MAP);
    state.removeAttribute(STATE_TERM_SELECTION);
    // read the search form field into the state object
    String siteType = StringUtils.trimToNull(data.getParameters().getString("siteType"));
    if (siteType != null)
    {
        // CONSISTENCY: compare against the declared SITE_TYPE_ANY constant
        // instead of repeating the "Any" literal.
        if (siteType.equalsIgnoreCase(SITE_TYPE_ANY))
            state.setAttribute(STATE_SEARCH_SITE_TYPE, null);
        else
        {
            state.setAttribute(STATE_SEARCH_SITE_TYPE, siteType);
            String termSearchSiteType = (String) state.getAttribute(SEARCH_TERM_SITE_TYPE);
            if (termSearchSiteType != null)
            {
                if (siteType.equals(termSearchSiteType))
                {
                    // search parameter - term; term.eid from UI
                    String term = StringUtils.trimToNull(data.getParameters().getString("selectTerm"));
                    if (term != null)
                    {
                        state.setAttribute(STATE_TERM_SELECTION, term);
                        // property criteria map: only constrain when a
                        // concrete term (not "Any") was chosen
                        Map pMap = null;
                        if (!SITE_TERM_ANY.equals(term))
                        {
                            pMap = new HashMap();
                            pMap.put((String) state.getAttribute(SEARCH_TERM_PROP), term);
                            state.setAttribute(STATE_PROP_SEARCH_MAP, pMap);
                        }
                    }
                }
            }
        }
    }
    else
    {
        state.setAttribute(STATE_SEARCH_SITE_TYPE, null);
    }
    // show the results in the list view with a fresh default page size
    state.setAttribute(MODE, LIST_VIEW);
    state.setAttribute(STATE_PAGESIZE, Integer.valueOf(DEFAULT_PAGE_SIZE));
    state.removeAttribute(INTER_SIZE);
} // doSearch
/**
 * Dispatch handler: the user cancelled the site picker.
 * Records the cancellation flag and marks the helper as done.
 */
public void doCancel(RunData data, Context context)
{
	JetspeedRunData jData = (JetspeedRunData) data;
	SessionState sessionState = jData.getPortletSessionState(jData.getJs_peid());
	sessionState.setAttribute(SiteHelper.SITE_PICKER_CANCELLED, Boolean.TRUE);
	sessionState.setAttribute(MODE, STATE_HELPER_DONE);
}
/**
 * Dispatch handler: the user picked a site. Stores the chosen site id and
 * ends the helper, or raises an alert if nothing was selected.
 */
public void doSelect(RunData data, Context context)
{
	JetspeedRunData jData = (JetspeedRunData) data;
	SessionState sessionState = jData.getPortletSessionState(jData.getJs_peid());
	String chosenSiteId = data.getParameters().getString("siteId");
	// Guard: no selection made — tell the user and stay on the picker.
	if (chosenSiteId == null || chosenSiteId.length() == 0) {
		addAlert(sessionState, rb.getString("list.not.selected"));
		return;
	}
	sessionState.setAttribute(SiteHelper.SITE_PICKER_SITE_ID, chosenSiteId);
	sessionState.setAttribute(MODE, STATE_HELPER_DONE);
}
/**
 * Return the url unchanged, unless it's a recognized entity reference, in
 * which case return that reference's access url instead.
 *
 * @param url the candidate url or entity reference string
 * @return the reference's url when recognized, otherwise the input unchanged
 */
private String convertReferenceUrl(String url)
{
	Reference ref = EntityManager.newReference(url);
	// Unrecognized references pass through untouched.
	return ref.isKnownType() ? ref.getUrl() : url;
} // convertReferenceUrl
protected void toolModeDispatch(String methodBase, String methodExt, HttpServletRequest req, HttpServletResponse res)
	throws ToolException
{
	ToolSession toolSession = SessionManager.getCurrentToolSession();
	SessionState state = getState(req);
	// NOTE(review): MODE is read from the tool session here, while the other
	// handlers write it into the portlet SessionState — confirm the helper
	// copies it across before relying on this branch.
	if (STATE_HELPER_DONE.equals(toolSession.getAttribute(MODE)))
	{
		// Helper finished: consume the stored "done" URL, drop this tool's
		// session attributes, and redirect the client back to the caller.
		String url = (String) SessionManager.getCurrentToolSession().getAttribute(Tool.HELPER_DONE_URL);
		SessionManager.getCurrentToolSession().removeAttribute(Tool.HELPER_DONE_URL);
		// TODO: Implement cleanup.
		cleanup(state);
		if (log.isDebugEnabled())
		{
			log.debug("Sending redirect to: "+ url);
		}
		try
		{
			res.sendRedirect(url);
		}
		catch (IOException e)
		{
			// Best effort: the response may already be committed.
			log.warn("Problem sending redirect to: "+ url, e);
		}
		return;
	}
	else if(sendToHelper(req, res, req.getPathInfo()))
	{
		// A nested helper handled the request; nothing more to do here.
		return;
	}
	else
	{
		// Normal case: fall back to the standard velocity dispatch.
		super.toolModeDispatch(methodBase, methodExt, req, res);
	}
}
/**
 * Remove every session attribute owned by this tool (those whose name
 * starts with {@code PREFIX}) from the given state.
 *
 * @param state the session state to purge
 */
private void cleanup(SessionState state) {
	for (String attributeName : (List<String>) state.getAttributeNames()) {
		if (!attributeName.startsWith(PREFIX)) {
			continue;
		}
		log.debug("Removed attribute: " + attributeName);
		state.removeAttribute(attributeName);
	}
}
} // SiteBrowserAction
| |
package com.usst.app.order.buy.rebate.action;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import com.usst.app.baseInfo.supplier.model.Supplier;
import com.usst.app.baseInfo.supplier.service.SupplierService;
import com.usst.app.component.serialNumber.service.SerialNumberService;
import com.usst.app.order.buy.rebate.model.Rebate;
import com.usst.app.order.buy.rebate.model.RebateItem;
import com.usst.app.order.buy.rebate.service.RebateItemService;
import com.usst.app.order.buy.rebate.service.RebateService;
import com.usst.app.system.user.model.SysUser;
import com.usst.code.struct.BaseAction;
import com.usst.code.util.PageInfo;
public class RebateAction extends BaseAction {
private static final long serialVersionUID = 1L;
private static final Logger logger = Logger.getLogger(RebateAction.class);
private Rebate rebate;
private RebateService rebateService;
private RebateItem rebateItem;
private List<RebateItem> rebateItemList;
private List<RebateItem> rebateItemListByState;
private List<Supplier> supplierList;
private SupplierService supplierService;
private RebateItemService rebateItemService;
private SerialNumberService serialNumberService;
public String list() {
return "list_rebate";
}
public String listJosn() {
logger.info("start list rebate");
List<Rebate> resultList = null;
int totalRows = 0;
try {
PageInfo pageInfo = createPageInfo();
if (this.rebate == null) {
this.rebate = new Rebate();
}
resultList = this.rebateService.pageList(pageInfo, this.rebate, true);
totalRows = pageInfo.getCount();
} catch (Exception e) {
logger.error("erro occur when list rebate", e);
}
if (resultList == null) {
resultList = new ArrayList();
}
this.jsonMap = new HashMap();
this.jsonMap.put("total", Integer.valueOf(totalRows));
this.jsonMap.put("rows", resultList);
logger.info("finish list all rebate");
return "success";
}
public String edit() {
SysUser loginMan = getSessionUserInfo();
if (this.rebate == null) {
this.rebate = new Rebate();
}
try {
if (StringUtils.isBlank(this.rebate.getId())) {
this.rebate.setState("c");
initModel(true, this.rebate, loginMan);
this.rebate.setHandlerId(loginMan.getId());
this.rebate.setHandlerName(loginMan.getName());
this.rebate.setDeptId(loginMan.getDeptId());
this.rebate.setDeptName(loginMan.getDeptName());
try {
String code = this.serialNumberService.getSerialNumberByDate("FL", "rebate");
this.rebate.setCode(code);
} catch (Exception e) {
logger.error("error occur when get code", e);
}
} else {
this.rebate = ((Rebate) this.rebateService.getModel(this.rebate.getId()));
initModel(false, this.rebate, loginMan);
}
} catch (Exception e) {
logger.error("error occur when list rebate", e);
}
return "edit_rebate";
}
public String itemEdit() {
SysUser loginMan = getSessionUserInfo();
if (this.rebate == null) {
this.rebate = new Rebate();
}
try {
this.rebate = ((Rebate) this.rebateService.getModel(this.rebate.getId()));
initModel(false, this.rebate, loginMan);
this.rebateItemList = this.rebateItemService.getRebateItem(this.rebate.getId());
if (this.rebateItemList.size() < 1) {
this.supplierList = this.supplierService.getSupplierList();
if (this.supplierList.size() > 0) {
for (int i = 0; i < this.supplierList.size(); i++) {
List resultList = null;
Double saleMoney = Double.valueOf(0.0D);
String supplierId = ((Supplier) this.supplierList.get(i)).getId();
if (StringUtils.isNotBlank(supplierId)) {
resultList = this.rebateItemService.getTotalMoney(supplierId, this.rebate.getStartTime(),
this.rebate.getEndTime());
if (resultList.get(0) != null) {
saleMoney = (Double) resultList.get(0);
}
if (this.rebateItem == null) {
this.rebateItem = new RebateItem();
}
this.rebateItem.setState("c");
initModel(true, this.rebateItem, loginMan);
this.rebateItem.setSupplierId(supplierId);
this.rebateItem.setSupplierName(((Supplier) this.supplierList.get(i)).getName());
this.rebateItem.setSaleMoney(saleMoney);
this.rebateItem.setId(this.rebateItemService.makeId());
this.rebateItem.setRebateId(this.rebate.getId());
this.rebateItem.setSort(Integer.valueOf(i + 1));
this.rebateItemService.insert(this.rebateItem);
}
}
}
}
} catch (Exception e) {
logger.error("error occur when list rebate", e);
}
return "itemEdit_rebate";
}
public String itemShow() {
SysUser loginMan = getSessionUserInfo();
if (this.rebate == null) {
this.rebate = new Rebate();
}
try {
this.rebate = ((Rebate) this.rebateService.getModel(this.rebate.getId()));
initModel(false, this.rebate, loginMan);
} catch (Exception e) {
logger.error("error occur when list rebate", e);
}
return "show_rebate";
}
public void save() {
logger.info("start to update rebate information");
try {
if (this.rebate == null) {
this.rebate = new Rebate();
}
if (StringUtils.isBlank(this.rebate.getId())) {
this.rebate.setId(this.rebateService.makeId());
this.rebateService.insert(this.rebate);
responseFlag(true);
} else {
this.rebateItemList = this.rebateItemService.getRebateItem(this.rebate.getId());
this.rebateItemListByState = this.rebateItemService.getRebateItemByState(this.rebate.getId());
if (this.rebateItemList.size() < 1) {
responseFlag("id");
} else if (this.rebateItemListByState.size() > 0) {
responseFlag("state");
} else {
this.rebateService.update(this.rebate);
responseFlag(true);
}
}
} catch (Exception e) {
logger.info("error occur when save rebate information", e);
e.printStackTrace();
responseFlag(false);
}
}
public void delete() {
SysUser loginMan = getSessionUserInfo();
try {
if (this.rebate == null) {
this.rebate = new Rebate();
}
this.rebateItemService.deleteByIntoId(this.rebate.getId());
this.rebateService.delete(this.rebate.getId());
logger.info(loginMan.getCode() + "delete rebate,id:" + this.rebate.getId());
responseFlag(true);
} catch (Exception e) {
responseFlag(false);
logger.info("error occur when delete a supplier", e);
}
}
public Rebate getRebate() {
return this.rebate;
}
public void setRebate(Rebate rebate) {
this.rebate = rebate;
}
public RebateItem getRebateItem() {
return this.rebateItem;
}
public void setRebateItem(RebateItem rebateItem) {
this.rebateItem = rebateItem;
}
public List<RebateItem> getRebateItemList() {
return this.rebateItemList;
}
public void setRebateItemList(List<RebateItem> rebateItemList) {
this.rebateItemList = rebateItemList;
}
public List<RebateItem> getRebateItemListByState() {
return this.rebateItemListByState;
}
public void setRebateItemListByState(List<RebateItem> rebateItemListByState) {
this.rebateItemListByState = rebateItemListByState;
}
public List<Supplier> getSupplierList() {
return this.supplierList;
}
public void setSupplierList(List<Supplier> supplierList) {
this.supplierList = supplierList;
}
public void setRebateService(RebateService rebateService) {
this.rebateService = rebateService;
}
public void setSupplierService(SupplierService supplierService) {
this.supplierService = supplierService;
}
public void setRebateItemService(RebateItemService rebateItemService) {
this.rebateItemService = rebateItemService;
}
public void setSerialNumberService(SerialNumberService serialNumberService) {
this.serialNumberService = serialNumberService;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.core.construction;
import static org.apache.beam.runners.core.construction.PTransformTranslation.COMBINE_GLOBALLY_TRANSFORM_URN;
import static org.apache.beam.runners.core.construction.PTransformTranslation.COMBINE_GROUPED_VALUES_TRANSFORM_URN;
import static org.apache.beam.runners.core.construction.PTransformTranslation.COMBINE_PER_KEY_TRANSFORM_URN;
import com.google.auto.service.AutoService;
import java.io.IOException;
import java.util.Map;
import org.apache.beam.model.pipeline.v1.RunnerApi;
import org.apache.beam.model.pipeline.v1.RunnerApi.CombinePayload;
import org.apache.beam.model.pipeline.v1.RunnerApi.FunctionSpec;
import org.apache.beam.runners.core.construction.PTransformTranslation.TransformPayloadTranslator;
import org.apache.beam.sdk.coders.CannotProvideCoderException;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.IterableCoder;
import org.apache.beam.sdk.coders.KvCoder;
import org.apache.beam.sdk.runners.AppliedPTransform;
import org.apache.beam.sdk.transforms.Combine;
import org.apache.beam.sdk.transforms.CombineFnBase.GlobalCombineFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.util.SerializableUtils;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
/**
 * Methods for translating between {@link Combine} {@link PTransform PTransforms} and {@link
 * RunnerApi.CombinePayload} protos.
 */
public class CombineTranslation {
  /** URN identifying a CombineFn encoded via Java serialization (non-portable payload). */
  static final String JAVA_SERIALIZED_COMBINE_FN_URN = "beam:combinefn:javasdk:v1";

  /** A {@link TransformPayloadTranslator} for {@link Combine.PerKey}. */
  public static class CombinePerKeyPayloadTranslator
      implements PTransformTranslation.TransformPayloadTranslator<Combine.PerKey<?, ?, ?>> {
    private CombinePerKeyPayloadTranslator() {}

    @Override
    public String getUrn(Combine.PerKey<?, ?, ?> transform) {
      return COMBINE_PER_KEY_TRANSFORM_URN;
    }

    /**
     * Builds the payload for a side-input-free Combine.PerKey; combines with
     * side inputs are left as generic composites (null spec).
     */
    @Override
    public FunctionSpec translate(
        AppliedPTransform<?, ?, Combine.PerKey<?, ?, ?>> transform, SdkComponents components)
        throws IOException {
      if (transform.getTransform().getSideInputs().isEmpty()) {
        GlobalCombineFn<?, ?, ?> combineFn = transform.getTransform().getFn();
        Coder<?> accumulatorCoder =
            extractAccumulatorCoder(combineFn, (AppliedPTransform) transform);
        return FunctionSpec.newBuilder()
            .setUrn(getUrn(transform.getTransform()))
            .setPayload(combinePayload(combineFn, accumulatorCoder, components).toByteString())
            .build();
      } else {
        // Combines with side inputs are translated as generic composites, which have a blank
        // FunctionSpec.
        return null;
      }
    }

    // Infers the accumulator coder from the value coder of the main input's KvCoder.
    private static <K, InputT, AccumT> Coder<AccumT> extractAccumulatorCoder(
        GlobalCombineFn<InputT, AccumT, ?> combineFn,
        AppliedPTransform<PCollection<KV<K, InputT>>, ?, Combine.PerKey<K, InputT, ?>> transform)
        throws IOException {
      try {
        @SuppressWarnings("unchecked")
        PCollection<KV<K, InputT>> mainInput =
            (PCollection<KV<K, InputT>>)
                Iterables.getOnlyElement(TransformInputs.nonAdditionalInputs(transform));
        return combineFn.getAccumulatorCoder(
            transform.getPipeline().getCoderRegistry(),
            ((KvCoder<K, InputT>) mainInput.getCoder()).getValueCoder());
      } catch (CannotProvideCoderException e) {
        throw new IOException("Could not obtain a Coder for the accumulator", e);
      }
    }
  }

  /** A {@link TransformPayloadTranslator} for {@link Combine.Globally}. */
  public static class CombineGloballyPayloadTranslator
      implements PTransformTranslation.TransformPayloadTranslator<Combine.Globally<?, ?>> {
    private CombineGloballyPayloadTranslator() {}

    @Override
    public String getUrn(Combine.Globally<?, ?> transform) {
      return COMBINE_GLOBALLY_TRANSFORM_URN;
    }

    /**
     * Builds the payload for a side-input-free Combine.Globally; combines with
     * side inputs are left as generic composites (null spec).
     */
    @Override
    public FunctionSpec translate(
        AppliedPTransform<?, ?, Combine.Globally<?, ?>> transform, SdkComponents components)
        throws IOException {
      if (transform.getTransform().getSideInputs().isEmpty()) {
        return FunctionSpec.newBuilder()
            .setUrn(getUrn(transform.getTransform()))
            .setPayload(
                payloadForCombineGlobally((AppliedPTransform) transform, components).toByteString())
            .build();
      } else {
        // Combines with side inputs are translated as generic composites, which have a blank
        // FunctionSpec.
        return null;
      }
    }

    // Infers the accumulator coder directly from the main input's element coder.
    private static <InputT, AccumT> Coder<AccumT> extractAccumulatorCoder(
        GlobalCombineFn<InputT, AccumT, ?> combineFn,
        AppliedPTransform<PCollection<InputT>, ?, Combine.Globally<InputT, ?>> transform)
        throws IOException {
      try {
        @SuppressWarnings("unchecked")
        PCollection<InputT> mainInput =
            (PCollection<InputT>)
                Iterables.getOnlyElement(TransformInputs.nonAdditionalInputs(transform));
        return combineFn.getAccumulatorCoder(
            transform.getPipeline().getCoderRegistry(), mainInput.getCoder());
      } catch (CannotProvideCoderException e) {
        throw new IOException("Could not obtain a Coder for the accumulator", e);
      }
    }

    /** Produces a {@link RunnerApi.CombinePayload} from a {@link Combine.Globally}. */
    @VisibleForTesting
    static <InputT, OutputT> CombinePayload payloadForCombineGlobally(
        final AppliedPTransform<
                PCollection<InputT>, PCollection<OutputT>, Combine.Globally<InputT, OutputT>>
            transform,
        final SdkComponents components)
        throws IOException {
      GlobalCombineFn<?, ?, ?> combineFn = transform.getTransform().getFn();
      Coder<?> accumulatorCoder = extractAccumulatorCoder(combineFn, (AppliedPTransform) transform);
      return combinePayload(combineFn, accumulatorCoder, components);
    }
  }

  /** A {@link TransformPayloadTranslator} for {@link Combine.GroupedValues}. */
  public static class CombineGroupedValuesPayloadTranslator
      implements PTransformTranslation.TransformPayloadTranslator<Combine.GroupedValues<?, ?, ?>> {
    private CombineGroupedValuesPayloadTranslator() {}

    @Override
    public String getUrn(Combine.GroupedValues<?, ?, ?> transform) {
      return COMBINE_GROUPED_VALUES_TRANSFORM_URN;
    }

    /**
     * Builds the payload for a side-input-free Combine.GroupedValues; combines
     * with side inputs are left as generic composites (null spec).
     */
    @Override
    public FunctionSpec translate(
        AppliedPTransform<?, ?, Combine.GroupedValues<?, ?, ?>> transform, SdkComponents components)
        throws IOException {
      if (transform.getTransform().getSideInputs().isEmpty()) {
        GlobalCombineFn<?, ?, ?> combineFn = transform.getTransform().getFn();
        Coder<?> accumulatorCoder =
            extractAccumulatorCoder(combineFn, (AppliedPTransform) transform);
        return FunctionSpec.newBuilder()
            .setUrn(getUrn(transform.getTransform()))
            .setPayload(combinePayload(combineFn, accumulatorCoder, components).toByteString())
            .build();
      } else {
        // Combines with side inputs are translated as generic composites, which have a blank
        // FunctionSpec.
        return null;
      }
    }

    // Input is KV<K, Iterable<InputT>>: unwrap the KvCoder then the
    // IterableCoder to reach the element coder the CombineFn consumes.
    private static <K, InputT, AccumT> Coder<AccumT> extractAccumulatorCoder(
        GlobalCombineFn<InputT, AccumT, ?> combineFn,
        AppliedPTransform<
                PCollection<KV<K, Iterable<InputT>>>, ?, Combine.GroupedValues<K, InputT, ?>>
            transform)
        throws IOException {
      try {
        @SuppressWarnings("unchecked")
        PCollection<KV<K, Iterable<InputT>>> mainInput =
            (PCollection<KV<K, Iterable<InputT>>>)
                Iterables.getOnlyElement(TransformInputs.nonAdditionalInputs(transform));
        KvCoder<K, Iterable<InputT>> kvCoder = (KvCoder<K, Iterable<InputT>>) mainInput.getCoder();
        IterableCoder<InputT> iterCoder = (IterableCoder<InputT>) kvCoder.getValueCoder();
        return combineFn.getAccumulatorCoder(
            transform.getPipeline().getCoderRegistry(), iterCoder.getElemCoder());
      } catch (CannotProvideCoderException e) {
        throw new IOException("Could not obtain a Coder for the accumulator", e);
      }
    }
  }

  /**
   * Registers {@link TransformPayloadTranslator TransformPayloadTranslators} for {@link Combine
   * Combines}.
   */
  @AutoService(TransformPayloadTranslatorRegistrar.class)
  public static class Registrar implements TransformPayloadTranslatorRegistrar {
    @Override
    public Map<? extends Class<? extends PTransform>, ? extends TransformPayloadTranslator>
        getTransformPayloadTranslators() {
      return ImmutableMap.<Class<? extends PTransform>, TransformPayloadTranslator>builder()
          .put(Combine.Globally.class, new CombineGloballyPayloadTranslator())
          .put(Combine.GroupedValues.class, new CombineGroupedValuesPayloadTranslator())
          .put(Combine.PerKey.class, new CombinePerKeyPayloadTranslator())
          .build();
    }
  }

  /** Produces a {@link RunnerApi.CombinePayload} from a {@link GlobalCombineFn}. */
  private static CombinePayload combinePayload(
      GlobalCombineFn<?, ?, ?> combineFn, Coder<?> accumulatorCoder, final SdkComponents components)
      throws IOException {
    return RunnerApi.CombinePayload.newBuilder()
        .setAccumulatorCoderId(components.registerCoder(accumulatorCoder))
        .setCombineFn(toProto(combineFn, components))
        .build();
  }

  // NOTE(review): the 'components' parameter is unused here; kept for
  // signature compatibility with existing callers.
  public static FunctionSpec toProto(GlobalCombineFn<?, ?, ?> combineFn, SdkComponents components) {
    return FunctionSpec.newBuilder()
        .setUrn(JAVA_SERIALIZED_COMBINE_FN_URN)
        .setPayload(ByteString.copyFrom(SerializableUtils.serializeToByteArray(combineFn)))
        .build();
  }
}
| |
package jp.gcreate.plugins.adbfriendly.ui;
import com.android.ddmlib.AndroidDebugBridge;
import com.android.ddmlib.IDevice;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.ValidationInfo;
import jp.gcreate.plugins.adbfriendly.adb.AdbConnector;
import jp.gcreate.plugins.adbfriendly.funciton.DeviceScreenRolling;
import jp.gcreate.plugins.adbfriendly.funciton.FriendlyFunctions;
import jp.gcreate.plugins.adbfriendly.funciton.FunctionsCallback;
import jp.gcreate.plugins.adbfriendly.funciton.FunctionsManager;
import jp.gcreate.plugins.adbfriendly.util.Logger;
import jp.gcreate.plugins.adbfriendly.util.PluginConfig;
import jp.gcreate.plugins.adbfriendly.util.ShellCommand;
import javax.swing.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
/*
* ADB Friendly
* Copyright 2016 gen0083
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Main dialog of the ADB Friendly plugin: lets the user pick a connected
 * device and run the screen-rotation function a given number of times.
 *
 * <p>Registers itself as a device/bridge change listener so the device list
 * refreshes live, and as a functions callback so the "already running" state
 * is kept current. Callers must invoke {@link #removeListenersOnExit()} when
 * the dialog goes away.
 */
public class FunctionsForm extends DialogWrapper implements FunctionsCallback, AndroidDebugBridge.IDeviceChangeListener,
        AndroidDebugBridge.IDebugBridgeChangeListener {
    // UI components bound from the associated .form file.
    private JTextField rollingCount;
    private JCheckBox showProgressCheckBox;
    private JList devicesList;
    private JLabel notifyDevicesNotFound;
    private JPanel menuWindow;
    private JLabel notifyAlreadyRunning;
    private JPanel adbConnectedPanel;
    private JButton adbConnectButton;
    private DefaultListModel connectedDevicesModel;
    private Project project;

    public FunctionsForm(AnActionEvent event) {
        super(event.getProject());
        setTitle("ADB Friendly");
        project = event.getProject();
        connectedDevicesModel = new DefaultListModel();
        devicesList.setCellRenderer(new DevicesListRenderer());
        adbConnectButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                // Let the user point the plugin at an adb binary manually.
                SetAdbPathForm pathForm = new SetAdbPathForm(project);
                pathForm.show();
            }
        });
        setListenerOnLaunch();
        checkAdbConnection();
        bindDevicesToList();
        checkRunningTaskExist();
        restorePreviousState();
        init();
    }

    /** Register device, bridge and function-state listeners. */
    private void setListenerOnLaunch() {
        AdbConnector.INSTANCE.addDeviceChangeListener(this);
        AdbConnector.INSTANCE.addBridgeChangedListener(this);
        FunctionsManager.INSTANCE.addFunctionsCallback(this);
    }

    /** Unregister everything registered in {@link #setListenerOnLaunch()}. */
    public void removeListenersOnExit() {
        AdbConnector.INSTANCE.removeDeviceChangedListener(this);
        // Bug fix: the device listener was removed twice and the bridge
        // listener registered in setListenerOnLaunch() was never removed
        // (assumes AdbConnector exposes removeBridgeChangedListener symmetric
        // with addBridgeChangedListener — TODO confirm).
        AdbConnector.INSTANCE.removeBridgeChangedListener(this);
        FunctionsManager.INSTANCE.removeFunctionsCallbacks(this);
    }

    @Override
    protected JComponent createCenterPanel() {
        return menuWindow;
    }

    /**
     * Show the "connect adb" panel when no bridge is up, and try to connect
     * using the configured path or, failing that, `which adb`.
     */
    private void checkAdbConnection() {
        boolean connected = AdbConnector.INSTANCE.isAdbConnected();
        adbConnectedPanel.setVisible(!connected);
        if (!connected) {
            String path = PluginConfig.INSTANCE.getAdbPath();
            if (path.equals("")) {
                path = new ShellCommand().executeCommand("which adb");
                path = path.trim();
            }
            // "timeout" marks a failed shell lookup — don't connect with it.
            if (!path.equals("") && !path.contains("timeout")) {
                AdbConnector.INSTANCE.connectAdbWithPath(path);
            }
        }
    }

    /** Restore the rotation count and progress checkbox from saved config. */
    private void restorePreviousState() {
        int count = PluginConfig.INSTANCE.getRotateCount();
        if (count > 0) {
            rollingCount.setText(Integer.toString(count));
            rollingCount.invalidate();
        }
        boolean showProgress = PluginConfig.INSTANCE.getShowProgress();
        if (showProgress) {
            showProgressCheckBox.setSelected(true);
            showProgressCheckBox.invalidate();
        }
    }

    /**
     * Rebuild the device list from the current adb devices, preselecting the
     * device used last time (matched by serial number).
     */
    private void bindDevicesToList() {
        IDevice[] devices = AdbConnector.INSTANCE.getDevices();
        boolean noDevices = (devices == null || devices.length == 0);
        notifyDevicesNotFound.setVisible(noDevices);
        notifyDevicesNotFound.invalidate();
        int selected = -1;
        int i = 0;
        connectedDevicesModel.clear();
        if (!noDevices) {
            String previousSerial = PluginConfig.INSTANCE.getDeviceSerial();
            for (IDevice device : devices) {
                if (device.getSerialNumber()
                          .equals(previousSerial)) {
                    selected = i;
                }
                connectedDevicesModel.addElement(device);
                i++;
            }
        }
        devicesList.setModel(connectedDevicesModel);
        // -1 clears the selection when the previous device is gone.
        devicesList.setSelectedIndex(selected);
        devicesList.invalidate();
    }

    /** Disable OK and show a notice while another function is running. */
    private void checkRunningTaskExist() {
        FriendlyFunctions currentFunction = FunctionsManager.INSTANCE.getRunningFunctionOrNull();
        boolean isRunning = currentFunction != null;
        // If running functions now then set disable buttons which to run functions.
        setOKActionEnabled(!isRunning);
        notifyAlreadyRunning.setVisible(isRunning);
        notifyAlreadyRunning.invalidate();
    }

    /**
     * Start the rotation function on the selected device and persist the
     * chosen settings. doValidate() guarantees a selection and a numeric
     * count before OK is enabled.
     */
    @Override
    protected void doOKAction() {
        int index = devicesList.getSelectedIndex();
        IDevice device = (IDevice) connectedDevicesModel.getElementAt(index);
        int count = Integer.parseInt(rollingCount.getText());
        if (device != null && device.isOnline()) {
            FunctionsManager.INSTANCE.startFunction(new DeviceScreenRolling(device, FunctionsManager.INSTANCE, count, showProgressCheckBox.isSelected()));
            PluginConfig.INSTANCE.setDeviceSerial(device.getSerialNumber());
            PluginConfig.INSTANCE.setRotateCount(count);
            PluginConfig.INSTANCE.setShowProgress(showProgressCheckBox.isSelected());
            PluginConfig.INSTANCE.save();
        }
        dispose();
    }

    /** Require a selected device and a positive integer rotation count. */
    @Override
    protected ValidationInfo doValidate() {
        Logger.d(this, "validation start");
        if (devicesList.getSelectedIndex() == -1) {
            return new ValidationInfo("Select a target device.", devicesList);
        }
        try {
            int count = Integer.parseInt(rollingCount.getText());
            if (count <= 0) {
                // Fixed typo in the user-facing message ("beggar" -> "bigger").
                return new ValidationInfo("Rotating count must be bigger than 0.", rollingCount);
            }
        } catch (NumberFormatException e) {
            return new ValidationInfo("Rotating count must be digit.", rollingCount);
        }
        return null;
    }

    /**
     * When device status changed, these callbacks are called. They may arrive
     * on a ddmlib thread, so all UI updates hop to the EDT.
     *
     * @param device connected device
     */
    @Override
    public void deviceConnected(IDevice device) {
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                bindDevicesToList();
            }
        });
    }

    @Override
    public void deviceDisconnected(IDevice device) {
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                bindDevicesToList();
                checkAdbConnection();
            }
        });
    }

    @Override
    public void deviceChanged(IDevice device, int changeMask) {
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                bindDevicesToList();
            }
        });
    }

    /**
     * If functions are finished then these callbacks are called from FunctionsManager.
     * The callbacks notify that FunctionsManager can start a next function.
     */
    @Override
    public void onDone() {
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                checkRunningTaskExist();
            }
        });
    }

    @Override
    public void onErred() {
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                checkRunningTaskExist();
            }
        });
    }

    @Override
    public void onCancelled() {
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                checkRunningTaskExist();
            }
        });
    }

    @Override
    public void bridgeChanged(AndroidDebugBridge bridge) {
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                checkAdbConnection();
                bindDevicesToList();
            }
        });
    }
}
| |
/*
* Copyright 2010 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.replication;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
public class TestReplication {
private static final Log LOG = LogFactory.getLog(TestReplication.class);
// Configurations for the two clusters (source and replica).
private static Configuration conf1;
private static Configuration conf2;
/*
private static ZooKeeperWrapper zkw1;
private static ZooKeeperWrapper zkw2;
*/
// Handles to the test table on each cluster.
private static HTable htable1;
private static HTable htable2;
private static HBaseTestingUtility utility1;
private static HBaseTestingUtility utility2;
// Test tuning knobs: batch size, poll interval and retry budget.
private static final int NB_ROWS_IN_BATCH = 100;
private static final long SLEEP_TIME = 500;
private static final int NB_RETRIES = 10;
// Table/family/row fixtures; "norep" is a family excluded from replication.
private static final byte[] tableName = Bytes.toBytes("test");
private static final byte[] famName = Bytes.toBytes("f");
private static final byte[] row = Bytes.toBytes("row");
private static final byte[] noRepfamName = Bytes.toBytes("norep");
/**
 * One-time setup: configure the source cluster for aggressive log rolling
 * so replication paths are exercised. The actual two-cluster bring-up is
 * currently commented out (see the REENABLE block below).
 *
 * @throws java.lang.Exception
 */
@BeforeClass
public static void setUpBeforeClass() throws Exception {
  conf1 = HBaseConfiguration.create();
  conf1.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/1");
  // smaller block size and capacity to trigger more operations
  // and test them
  conf1.setInt("hbase.regionserver.hlog.blocksize", 1024*20);
  conf1.setInt("replication.source.size.capacity", 1024);
  conf1.setLong("replication.source.sleepforretries", 100);
  conf1.setInt("hbase.regionserver.maxlogs", 10);
  conf1.setLong("hbase.master.logcleaner.ttl", 10);
  conf1.setLong("hbase.client.retries.number", 5);
  conf1.setLong("hbase.regions.percheckin", 1);
  conf1.setBoolean(HConstants.REPLICATION_ENABLE_KEY, true);
  conf1.setBoolean("dfs.support.append", true);
  conf1.setLong(HConstants.THREAD_WAKE_FREQUENCY, 100);
  // NOTE(review): conf2/utility1/utility2 are never initialised while this
  // block stays disabled — setUp() would NPE if the @Ignore'd tests ran.
  /* REENABLE
  utility1 = new HBaseTestingUtility(conf1);
  utility1.startMiniZKCluster();
  MiniZooKeeperCluster miniZK = utility1.getZkCluster();
  zkw1 = ZooKeeperWrapper.createInstance(conf1, "cluster1");
  zkw1.writeZNode("/1", "replication", "");
  zkw1.writeZNode("/1/replication", "master",
  conf1.get(HConstants.ZOOKEEPER_QUORUM)+":" +
  conf1.get("hbase.zookeeper.property.clientPort")+":/1");
  setIsReplication(true);
  LOG.info("Setup first Zk");
  conf2 = HBaseConfiguration.create();
  conf2.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/2");
  conf2.setInt("hbase.client.retries.number", 6);
  conf2.setBoolean(HConstants.REPLICATION_ENABLE_KEY, true);
  conf2.setBoolean("dfs.support.append", true);
  conf2.setLong("hbase.regions.percheckin", 1);
  utility2 = new HBaseTestingUtility(conf2);
  utility2.setZkCluster(miniZK);
  zkw2 = ZooKeeperWrapper.createInstance(conf2, "cluster2");
  zkw2.writeZNode("/2", "replication", "");
  zkw2.writeZNode("/2/replication", "master",
  conf1.get(HConstants.ZOOKEEPER_QUORUM)+":" +
  conf1.get("hbase.zookeeper.property.clientPort")+":/1");
  zkw1.writeZNode("/1/replication/peers", "1",
  conf2.get(HConstants.ZOOKEEPER_QUORUM)+":" +
  conf2.get("hbase.zookeeper.property.clientPort")+":/2");
  LOG.info("Setup second Zk");
  utility1.startMiniCluster(2);
  utility2.startMiniCluster(2);
  HTableDescriptor table = new HTableDescriptor(tableName);
  table.setDeferredLogFlush(false);
  HColumnDescriptor fam = new HColumnDescriptor(famName);
  fam.setScope(HConstants.REPLICATION_SCOPE_GLOBAL);
  table.addFamily(fam);
  fam = new HColumnDescriptor(noRepfamName);
  table.addFamily(fam);
  HBaseAdmin admin1 = new HBaseAdmin(conf1);
  HBaseAdmin admin2 = new HBaseAdmin(conf2);
  admin1.createTable(table);
  admin2.createTable(table);
  htable1 = new HTable(conf1, tableName);
  htable1.setWriteBufferSize(1024);
  htable2 = new HTable(conf2, tableName);
  */
}
/**
 * Toggles replication on the master cluster by (eventually) writing the
 * "state" znode. The znode write itself is currently commented out pending
 * re-enablement of this suite, so today this only logs and sleeps long
 * enough for ZooKeeper watchers to fire once the write is restored.
 *
 * @param rep true to enable replication, false to disable it
 */
private static void setIsReplication(boolean rep) throws Exception {
    LOG.info("Set rep " + rep);
    // REENABLE zkw1.writeZNode("/1/replication", "state", Boolean.toString(rep));
    // Takes some ms for ZK to fire the watcher
    Thread.sleep(SLEEP_TIME);
}
/**
 * Per-test reset: pauses replication, truncates the test table on both
 * clusters, waits for the truncation to settle, then re-enables replication.
 *
 * @throws java.lang.Exception
 */
@Before
public void setUp() throws Exception {
    setIsReplication(false);
    utility1.truncateTable(tableName);
    utility2.truncateTable(tableName);
    // If test is flaky, set that sleep higher
    Thread.sleep(SLEEP_TIME * 8);
    setIsReplication(true);
}
/**
 * Shuts down both mini clusters once the whole class has run.
 * Currently a no-op: the shutdown calls are commented out pending
 * re-enablement of this suite.
 *
 * @throws java.lang.Exception
 */
@AfterClass
public static void tearDownAfterClass() throws Exception {
    /* REENABLE
    utility2.shutdownMiniCluster();
    utility1.shutdownMiniCluster();
    */
}
/**
 * Add a row, check it's replicated to the slave cluster, delete it,
 * then check the delete is replicated too.
 *
 * @throws Exception
 */
@Ignore @Test
public void testSimplePutDelete() throws Exception {
    LOG.info("testSimplePutDelete");
    Put put = new Put(row);
    put.add(famName, row, row);
    htable1 = new HTable(conf1, tableName);
    htable1.put(put);
    HTable table2 = new HTable(conf2, tableName);
    Get get = new Get(row);
    // Poll the slave cluster until the put shows up or we exhaust retries.
    for (int i = 0; i < NB_RETRIES; i++) {
        if (i == NB_RETRIES - 1) {
            fail("Waited too much time for put replication");
        }
        Result res = table2.get(get);
        if (res.size() == 0) {
            LOG.info("Row not available");
            Thread.sleep(SLEEP_TIME);
        } else {
            // JUnit convention: expected value first, actual second.
            assertArrayEquals(row, res.value());
            break;
        }
    }
    Delete del = new Delete(row);
    htable1.delete(del);
    table2 = new HTable(conf2, tableName);
    get = new Get(row);
    // Poll until the delete has been replicated (row disappears).
    for (int i = 0; i < NB_RETRIES; i++) {
        if (i == NB_RETRIES - 1) {
            fail("Waited too much time for del replication");
        }
        Result res = table2.get(get);
        if (res.size() >= 1) {
            LOG.info("Row not deleted");
            Thread.sleep(SLEEP_TIME);
        } else {
            break;
        }
    }
}
/**
 * Try a small batch upload using the client write buffer, then check
 * the whole batch is replicated to the slave cluster.
 *
 * @throws Exception
 */
@Ignore @Test
public void testSmallBatch() throws Exception {
    LOG.info("testSmallBatch");
    Put put;
    // Buffer the puts client-side so they go out as one batch.
    htable1.setAutoFlush(false);
    try {
        for (int i = 0; i < NB_ROWS_IN_BATCH; i++) {
            put = new Put(Bytes.toBytes(i));
            put.add(famName, row, row);
            htable1.put(put);
        }
        htable1.flushCommits();
        Scan scan = new Scan();
        ResultScanner scanner1 = htable1.getScanner(scan);
        Result[] res1 = scanner1.next(NB_ROWS_IN_BATCH);
        scanner1.close();
        assertEquals(NB_ROWS_IN_BATCH, res1.length);
        // Poll the slave cluster until all rows have been replicated.
        for (int i = 0; i < NB_RETRIES; i++) {
            if (i == NB_RETRIES - 1) {
                fail("Waited too much time for normal batch replication");
            }
            ResultScanner scanner = htable2.getScanner(scan);
            Result[] res = scanner.next(NB_ROWS_IN_BATCH);
            scanner.close();
            if (res.length != NB_ROWS_IN_BATCH) {
                LOG.info("Only got " + res.length + " rows");
                Thread.sleep(SLEEP_TIME);
            } else {
                break;
            }
        }
    } finally {
        // Restore auto-flush even when an assertion fails, so later tests
        // sharing htable1 are not left with a disabled write buffer.
        htable1.setAutoFlush(true);
    }
}
/**
 * Test stopping replication, trying to insert, make sure nothing's
 * replicated, enable it, try replicating and it should work. Finally,
 * verify that a family with local-only scope is never replicated.
 *
 * @throws Exception
 */
@Ignore @Test
public void testStartStop() throws Exception {
    // Test stopping replication
    setIsReplication(false);
    Put put = new Put(Bytes.toBytes("stop start"));
    put.add(famName, row, row);
    htable1.put(put);
    Get get = new Get(Bytes.toBytes("stop start"));
    // While replication is stopped the row must never appear on the slave.
    for (int i = 0; i < NB_RETRIES; i++) {
        if (i == NB_RETRIES - 1) {
            break;
        }
        Result res = htable2.get(get);
        if (res.size() >= 1) {
            fail("Replication wasn't stopped");
        } else {
            LOG.info("Row not replicated, let's wait a bit more...");
            Thread.sleep(SLEEP_TIME);
        }
    }
    // Test restart replication
    setIsReplication(true);
    htable1.put(put);
    for (int i = 0; i < NB_RETRIES; i++) {
        if (i == NB_RETRIES - 1) {
            fail("Waited too much time for put replication");
        }
        Result res = htable2.get(get);
        if (res.size() == 0) {
            LOG.info("Row not available");
            Thread.sleep(SLEEP_TIME);
        } else {
            // JUnit convention: expected value first, actual second.
            assertArrayEquals(row, res.value());
            break;
        }
    }
    // A family that is not scoped for replication must stay local.
    put = new Put(Bytes.toBytes("do not rep"));
    put.add(noRepfamName, row, row);
    htable1.put(put);
    get = new Get(Bytes.toBytes("do not rep"));
    for (int i = 0; i < NB_RETRIES; i++) {
        if (i == NB_RETRIES - 1) {
            break;
        }
        Result res = htable2.get(get);
        if (res.size() >= 1) {
            fail("Not supposed to be replicated");
        } else {
            LOG.info("Row not replicated, let's wait a bit more...");
            Thread.sleep(SLEEP_TIME);
        }
    }
}
/**
 * Do a more intense version of testSmallBatch, one that will trigger
 * hlog rolling and other non-trivial code paths.
 *
 * @throws Exception
 */
@Ignore @Test
public void loadTesting() throws Exception {
    htable1.setWriteBufferSize(1024);
    htable1.setAutoFlush(false);
    for (int i = 0; i < NB_ROWS_IN_BATCH * 10; i++) {
        Put put = new Put(Bytes.toBytes(i));
        put.add(famName, row, row);
        htable1.put(put);
    }
    htable1.flushCommits();
    Scan scan = new Scan();
    ResultScanner scanner = htable1.getScanner(scan);
    // Ask for more rows than expected so over-replication would be caught
    // by the length check below.
    Result[] res = scanner.next(NB_ROWS_IN_BATCH * 100);
    scanner.close();
    assertEquals(NB_ROWS_IN_BATCH * 10, res.length);
    scan = new Scan();
    for (int i = 0; i < NB_RETRIES; i++) {
        scanner = htable2.getScanner(scan);
        res = scanner.next(NB_ROWS_IN_BATCH * 100);
        scanner.close();
        if (res.length != NB_ROWS_IN_BATCH * 10) {
            if (i == NB_RETRIES - 1) {
                // Out of retries: log exactly which rows never arrived.
                // (Loop variable renamed from "row" to avoid shadowing the
                // byte[] field of the same name used throughout this class.)
                int lastRow = -1;
                for (Result result : res) {
                    int currentRow = Bytes.toInt(result.getRow());
                    for (int missingRow = lastRow + 1; missingRow < currentRow; missingRow++) {
                        LOG.error("Row missing: " + missingRow);
                    }
                    lastRow = currentRow;
                }
                LOG.error("Last row: " + lastRow);
                fail("Waited too much time for normal batch replication, "
                    + res.length + " instead of " + NB_ROWS_IN_BATCH * 10);
            } else {
                LOG.info("Only got " + res.length + " rows");
                Thread.sleep(SLEEP_TIME);
            }
        } else {
            break;
        }
    }
}
/**
 * Load up multiple tables over 2 region servers and kill a source during
 * the upload. The failover happens internally.
 * @throws Exception
 */
@Ignore @Test
public void queueFailover() throws Exception {
    utility1.createMultiRegions(htable1, famName);
    // killing the RS with .META. can result into failed puts until we solve
    // IO fencing
    int rsToKill1 =
        utility1.getHBaseCluster().getServerWithMeta() == 0 ? 1 : 0;
    int rsToKill2 =
        utility2.getHBaseCluster().getServerWithMeta() == 0 ? 1 : 0;
    // Takes about 20 secs to run the full loading, kill around the middle
    Thread killer1 = killARegionServer(utility1, 7500, rsToKill1);
    Thread killer2 = killARegionServer(utility2, 10000, rsToKill2);
    LOG.info("Start loading table");
    int initialCount = utility1.loadTable(htable1, famName);
    LOG.info("Done loading table");
    killer1.join(5000);
    killer2.join(5000);
    LOG.info("Done waiting for threads");
    Result[] res;
    // Retry the master-side scan until the cluster has recovered from the
    // region server kill (scanners opened before the kill become unknown).
    while (true) {
        try {
            Scan scan = new Scan();
            ResultScanner scanner = htable1.getScanner(scan);
            res = scanner.next(initialCount);
            scanner.close();
            break;
        } catch (UnknownScannerException ex) {
            LOG.info("Cluster wasn't ready yet, restarting scanner");
        }
    }
    // Test we actually have all the rows, we may miss some because we
    // don't have IO fencing.
    if (res.length != initialCount) {
        LOG.warn("We lost some rows on the master cluster!");
        // We don't really expect the other cluster to have more rows
        initialCount = res.length;
    }
    Scan scan2 = new Scan();
    int lastCount = 0;
    for (int i = 0; i < NB_RETRIES; i++) {
        if (i==NB_RETRIES-1) {
            fail("Waited too much time for queueFailover replication");
        }
        ResultScanner scanner2 = htable2.getScanner(scan2);
        Result[] res2 = scanner2.next(initialCount * 2);
        scanner2.close();
        if (res2.length < initialCount) {
            if (lastCount < res2.length) {
                i--; // Don't increment timeout if we make progress
            }
            lastCount = res2.length;
            LOG.info("Only got " + lastCount + " rows instead of " +
                initialCount + " current i=" + i);
            Thread.sleep(SLEEP_TIME*2);
        } else {
            break;
        }
    }
}
/**
 * Starts a daemon-style helper thread that sleeps for {@code timeout}
 * milliseconds and then expires the ZooKeeper session of region server
 * {@code rs}, simulating a crash mid-test.
 *
 * @param utility the cluster utility owning the region server
 * @param timeout delay in milliseconds before the kill
 * @param rs index of the region server to expire
 * @return the already-started killer thread, so callers can join() it
 */
private static Thread killARegionServer(final HBaseTestingUtility utility,
    final long timeout, final int rs) {
    Thread killer = new Thread() {
        @Override
        public void run() {
            try {
                Thread.sleep(timeout);
                utility.expireRegionServerSession(rs);
            } catch (Exception e) {
                LOG.error(e);
            }
        }
    };
    killer.start();
    return killer;
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive.metastore;
import com.facebook.presto.hive.ForHiveMetastore;
import com.facebook.presto.hive.HiveClientConfig;
import com.facebook.presto.hive.HiveCluster;
import com.facebook.presto.hive.HiveMetastoreClient;
import com.facebook.presto.hive.HiveViewNotSupportedException;
import com.facebook.presto.hive.TableAlreadyExistsException;
import com.facebook.presto.hive.util.BackgroundCacheLoader;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.TableNotFoundException;
import com.google.common.base.Throwables;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.UncheckedExecutionException;
import io.airlift.units.Duration;
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.api.UnknownDBException;
import org.apache.thrift.TException;
import org.weakref.jmx.Flatten;
import org.weakref.jmx.Managed;
import javax.annotation.concurrent.ThreadSafe;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_METASTORE_ERROR;
import static com.facebook.presto.hive.HiveUtil.PRESTO_VIEW_FLAG;
import static com.facebook.presto.hive.HiveUtil.isPrestoView;
import static com.facebook.presto.hive.RetryDriver.retry;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.Iterables.transform;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.HIVE_FILTER_FIELD_PARAMS;
/**
 * Hive Metastore Cache.
 * <p>
 * Wraps a {@link HiveCluster} behind a set of Guava {@link LoadingCache}s
 * (databases, tables, views, partitions and partition-name filters). Entries
 * expire after {@code cacheTtl} and are refreshed asynchronously on the
 * supplied executor after {@code refreshInterval}. All metastore calls go
 * through {@code RetryDriver} and wrap Thrift failures in
 * {@link PrestoException} with {@code HIVE_METASTORE_ERROR}.
 */
@ThreadSafe
public class CachingHiveMetastore
        implements HiveMetastore
{
    // JMX-exported call statistics for each metastore operation.
    private final CachingHiveMetastoreStats stats = new CachingHiveMetastoreStats();
    protected final HiveCluster clientProvider;
    private final LoadingCache<String, List<String>> databaseNamesCache;
    private final LoadingCache<String, Database> databaseCache;
    private final LoadingCache<String, List<String>> tableNamesCache;
    private final LoadingCache<String, List<String>> viewNamesCache;
    private final LoadingCache<HiveTableName, List<String>> partitionNamesCache;
    private final LoadingCache<HiveTableName, Table> tableCache;
    private final LoadingCache<HivePartitionName, Partition> partitionCache;
    private final LoadingCache<PartitionFilter, List<String>> partitionFilterCache;

    @Inject
    public CachingHiveMetastore(HiveCluster hiveCluster, @ForHiveMetastore ExecutorService executor, HiveClientConfig hiveClientConfig)
    {
        this(checkNotNull(hiveCluster, "hiveCluster is null"),
                checkNotNull(executor, "executor is null"),
                checkNotNull(hiveClientConfig, "hiveClientConfig is null").getMetastoreCacheTtl(),
                hiveClientConfig.getMetastoreRefreshInterval());
    }

    /**
     * @param hiveCluster     source of metastore clients
     * @param executor        executor used for asynchronous cache refreshes
     * @param cacheTtl        how long an entry may live before expiring
     * @param refreshInterval how long before a read triggers a background reload
     */
    public CachingHiveMetastore(HiveCluster hiveCluster, ExecutorService executor, Duration cacheTtl, Duration refreshInterval)
    {
        this.clientProvider = checkNotNull(hiveCluster, "hiveCluster is null");
        long expiresAfterWriteMillis = checkNotNull(cacheTtl, "cacheTtl is null").toMillis();
        long refreshMills = checkNotNull(refreshInterval, "refreshInterval is null").toMillis();
        ListeningExecutorService listeningExecutor = MoreExecutors.listeningDecorator(executor);

        // Every cache below shares the same TTL/refresh policy; loads are
        // delegated to the load* methods, refreshes run on listeningExecutor.
        databaseNamesCache = CacheBuilder.newBuilder()
                .expireAfterWrite(expiresAfterWriteMillis, MILLISECONDS)
                .refreshAfterWrite(refreshMills, MILLISECONDS)
                .build(new BackgroundCacheLoader<String, List<String>>(listeningExecutor)
                {
                    @Override
                    public List<String> load(String key)
                            throws Exception
                    {
                        // The key is ignored; there is a single global list of databases.
                        return loadAllDatabases();
                    }
                });

        databaseCache = CacheBuilder.newBuilder()
                .expireAfterWrite(expiresAfterWriteMillis, MILLISECONDS)
                .refreshAfterWrite(refreshMills, MILLISECONDS)
                .build(new BackgroundCacheLoader<String, Database>(listeningExecutor)
                {
                    @Override
                    public Database load(String databaseName)
                            throws Exception
                    {
                        return loadDatabase(databaseName);
                    }
                });

        tableNamesCache = CacheBuilder.newBuilder()
                .expireAfterWrite(expiresAfterWriteMillis, MILLISECONDS)
                .refreshAfterWrite(refreshMills, MILLISECONDS)
                .build(new BackgroundCacheLoader<String, List<String>>(listeningExecutor)
                {
                    @Override
                    public List<String> load(String databaseName)
                            throws Exception
                    {
                        return loadAllTables(databaseName);
                    }
                });

        tableCache = CacheBuilder.newBuilder()
                .expireAfterWrite(expiresAfterWriteMillis, MILLISECONDS)
                .refreshAfterWrite(refreshMills, MILLISECONDS)
                .build(new BackgroundCacheLoader<HiveTableName, Table>(listeningExecutor)
                {
                    @Override
                    public Table load(HiveTableName hiveTableName)
                            throws Exception
                    {
                        return loadTable(hiveTableName);
                    }
                });

        viewNamesCache = CacheBuilder.newBuilder()
                .expireAfterWrite(expiresAfterWriteMillis, MILLISECONDS)
                .refreshAfterWrite(refreshMills, MILLISECONDS)
                .build(new BackgroundCacheLoader<String, List<String>>(listeningExecutor)
                {
                    @Override
                    public List<String> load(String databaseName)
                            throws Exception
                    {
                        return loadAllViews(databaseName);
                    }
                });

        partitionNamesCache = CacheBuilder.newBuilder()
                .expireAfterWrite(expiresAfterWriteMillis, MILLISECONDS)
                .refreshAfterWrite(refreshMills, MILLISECONDS)
                .build(new BackgroundCacheLoader<HiveTableName, List<String>>(listeningExecutor)
                {
                    @Override
                    public List<String> load(HiveTableName hiveTableName)
                            throws Exception
                    {
                        return loadPartitionNames(hiveTableName);
                    }
                });

        partitionFilterCache = CacheBuilder.newBuilder()
                .expireAfterWrite(expiresAfterWriteMillis, MILLISECONDS)
                .refreshAfterWrite(refreshMills, MILLISECONDS)
                .build(new BackgroundCacheLoader<PartitionFilter, List<String>>(listeningExecutor)
                {
                    @Override
                    public List<String> load(PartitionFilter partitionFilter)
                            throws Exception
                    {
                        return loadPartitionNamesByParts(partitionFilter);
                    }
                });

        partitionCache = CacheBuilder.newBuilder()
                .expireAfterWrite(expiresAfterWriteMillis, MILLISECONDS)
                .refreshAfterWrite(refreshMills, MILLISECONDS)
                .build(new BackgroundCacheLoader<HivePartitionName, Partition>(listeningExecutor)
                {
                    @Override
                    public Partition load(HivePartitionName partitionName)
                            throws Exception
                    {
                        return loadPartitionByName(partitionName);
                    }

                    // Bulk load so getAll() fetches many partitions per metastore call.
                    @Override
                    public Map<HivePartitionName, Partition> loadAll(Iterable<? extends HivePartitionName> partitionNames)
                            throws Exception
                    {
                        return loadPartitionsByNames(partitionNames);
                    }
                });
    }

    @Managed
    @Flatten
    public CachingHiveMetastoreStats getStats()
    {
        return stats;
    }

    /**
     * Drops every cached entry; the next read of each key will hit the metastore.
     */
    @Override
    @Managed
    public void flushCache()
    {
        databaseNamesCache.invalidateAll();
        tableNamesCache.invalidateAll();
        viewNamesCache.invalidateAll();
        partitionNamesCache.invalidateAll();
        databaseCache.invalidateAll();
        tableCache.invalidateAll();
        partitionCache.invalidateAll();
        partitionFilterCache.invalidateAll();
    }

    /**
     * Cache read that unwraps the loader's cause and rethrows it as
     * {@code exceptionClass} when it matches, otherwise propagates unchecked.
     */
    private static <K, V, E extends Exception> V get(LoadingCache<K, V> cache, K key, Class<E> exceptionClass)
            throws E
    {
        try {
            return cache.get(key);
        }
        catch (ExecutionException | UncheckedExecutionException e) {
            Throwable t = e.getCause();
            Throwables.propagateIfInstanceOf(t, exceptionClass);
            throw Throwables.propagate(t);
        }
    }

    /**
     * Bulk variant of {@link #get}: same cause-unwrapping semantics for getAll().
     */
    private static <K, V, E extends Exception> Map<K, V> getAll(LoadingCache<K, V> cache, Iterable<K> keys, Class<E> exceptionClass)
            throws E
    {
        try {
            return cache.getAll(keys);
        }
        catch (ExecutionException | UncheckedExecutionException e) {
            Throwable t = e.getCause();
            Throwables.propagateIfInstanceOf(t, exceptionClass);
            throw Throwables.propagate(t);
        }
    }

    @Override
    public List<String> getAllDatabases()
    {
        // Single global list, cached under a constant dummy key.
        return get(databaseNamesCache, "", RuntimeException.class);
    }

    private List<String> loadAllDatabases()
            throws Exception
    {
        try {
            return retry()
                    .stopOnIllegalExceptions()
                    .run("getAllDatabases", stats.getGetAllDatabases().wrap(() -> {
                        try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) {
                            return client.get_all_databases();
                        }
                    }));
        }
        catch (TException e) {
            throw new PrestoException(HIVE_METASTORE_ERROR, e);
        }
    }

    @Override
    public Database getDatabase(String databaseName)
            throws NoSuchObjectException
    {
        return get(databaseCache, databaseName, NoSuchObjectException.class);
    }

    private Database loadDatabase(final String databaseName)
            throws Exception
    {
        try {
            return retry()
                    .stopOn(NoSuchObjectException.class)
                    .stopOnIllegalExceptions()
                    .run("getDatabase", stats.getGetDatabase().wrap(() -> {
                        try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) {
                            return client.get_database(databaseName);
                        }
                    }));
        }
        catch (NoSuchObjectException e) {
            // Surface "database missing" to the caller instead of wrapping it.
            throw e;
        }
        catch (TException e) {
            throw new PrestoException(HIVE_METASTORE_ERROR, e);
        }
    }

    @Override
    public List<String> getAllTables(String databaseName)
            throws NoSuchObjectException
    {
        return get(tableNamesCache, databaseName, NoSuchObjectException.class);
    }

    private List<String> loadAllTables(final String databaseName)
            throws Exception
    {
        final Callable<List<String>> getAllTables = stats.getGetAllTables().wrap(() -> {
            try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) {
                return client.get_all_tables(databaseName);
            }
        });

        final Callable<Void> getDatabase = stats.getGetDatabase().wrap(() -> {
            try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) {
                client.get_database(databaseName);
                return null;
            }
        });

        try {
            return retry()
                    .stopOn(NoSuchObjectException.class)
                    .stopOnIllegalExceptions()
                    .run("getAllTables", () -> {
                        List<String> tables = getAllTables.call();
                        if (tables.isEmpty()) {
                            // An empty list is ambiguous: the database may simply not
                            // exist. Probe it so a missing database throws instead of
                            // being cached as an empty table list.
                            getDatabase.call();
                        }
                        return tables;
                    });
        }
        catch (NoSuchObjectException e) {
            throw e;
        }
        catch (TException e) {
            throw new PrestoException(HIVE_METASTORE_ERROR, e);
        }
    }

    @Override
    public Table getTable(String databaseName, String tableName)
            throws NoSuchObjectException
    {
        return get(tableCache, HiveTableName.table(databaseName, tableName), NoSuchObjectException.class);
    }

    @Override
    public List<String> getAllViews(String databaseName)
            throws NoSuchObjectException
    {
        return get(viewNamesCache, databaseName, NoSuchObjectException.class);
    }

    private List<String> loadAllViews(final String databaseName)
            throws Exception
    {
        try {
            return retry()
                    .stopOn(UnknownDBException.class)
                    .stopOnIllegalExceptions()
                    .run("getAllViews", stats.getAllViews().wrap(() -> {
                        try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) {
                            // Only tables flagged as Presto views are returned.
                            String filter = HIVE_FILTER_FIELD_PARAMS + PRESTO_VIEW_FLAG + " = \"true\"";
                            return client.get_table_names_by_filter(databaseName, filter, (short) -1);
                        }
                    }));
        }
        catch (UnknownDBException e) {
            // Normalize the Thrift "unknown database" into the exception callers expect.
            throw new NoSuchObjectException(e.getMessage());
        }
        catch (TException e) {
            throw new PrestoException(HIVE_METASTORE_ERROR, e);
        }
    }

    @Override
    public void createTable(final Table table)
    {
        try {
            retry()
                    .stopOn(AlreadyExistsException.class, InvalidObjectException.class, MetaException.class, NoSuchObjectException.class)
                    .stopOnIllegalExceptions()
                    .run("createTable", stats.getCreateTable().wrap(() -> {
                        try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) {
                            client.create_table(table);
                        }
                        // Invalidate name listings so the new table/view shows up immediately.
                        tableNamesCache.invalidate(table.getDbName());
                        viewNamesCache.invalidate(table.getDbName());
                        return null;
                    }));
        }
        catch (AlreadyExistsException e) {
            throw new TableAlreadyExistsException(new SchemaTableName(table.getDbName(), table.getTableName()));
        }
        catch (InvalidObjectException | NoSuchObjectException | MetaException e) {
            throw Throwables.propagate(e);
        }
        catch (TException e) {
            throw new PrestoException(HIVE_METASTORE_ERROR, e);
        }
        catch (Exception e) {
            if (e instanceof InterruptedException) {
                // Preserve the interrupt flag for the calling thread.
                Thread.currentThread().interrupt();
            }
            throw Throwables.propagate(e);
        }
    }

    @Override
    public void dropTable(final String databaseName, final String tableName)
    {
        try {
            retry()
                    .stopOn(NoSuchObjectException.class)
                    .stopOnIllegalExceptions()
                    .run("dropTable", stats.getDropTable().wrap(() -> {
                        try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) {
                            // true: also delete the table's data directory.
                            client.drop_table(databaseName, tableName, true);
                        }
                        tableCache.invalidate(new HiveTableName(databaseName, tableName));
                        tableNamesCache.invalidate(databaseName);
                        viewNamesCache.invalidate(databaseName);
                        return null;
                    }));
        }
        catch (NoSuchObjectException e) {
            throw new TableNotFoundException(new SchemaTableName(databaseName, tableName));
        }
        catch (TException e) {
            throw new PrestoException(HIVE_METASTORE_ERROR, e);
        }
        catch (Exception e) {
            if (e instanceof InterruptedException) {
                Thread.currentThread().interrupt();
            }
            throw Throwables.propagate(e);
        }
    }

    @Override
    public void renameTable(final String databaseName, final String tableName, final String newDatabaseName, final String newTableName)
    {
        try {
            retry()
                    .stopOn(InvalidOperationException.class, MetaException.class)
                    .stopOnIllegalExceptions()
                    .run("renameTable", stats.getRenameTable().wrap(() -> {
                        try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) {
                            // Fetch a fresh copy (bypassing the cache), retarget its
                            // name, and let the metastore perform the rename.
                            Table table = new Table(loadTable(new HiveTableName(databaseName, tableName)));
                            table.setDbName(newDatabaseName);
                            table.setTableName(newTableName);
                            client.alter_table(databaseName, tableName, table);
                        }
                        // NOTE(review): only the OLD name is invalidated here; stale
                        // negative/positive entries for the new name (if any) age out
                        // via the TTL — confirm this is intentional.
                        tableCache.invalidate(new HiveTableName(databaseName, tableName));
                        tableNamesCache.invalidate(databaseName);
                        viewNamesCache.invalidate(databaseName);
                        return null;
                    }));
        }
        catch (InvalidOperationException | MetaException e) {
            throw Throwables.propagate(e);
        }
        catch (TException e) {
            throw new PrestoException(HIVE_METASTORE_ERROR, e);
        }
        catch (Exception e) {
            if (e instanceof InterruptedException) {
                Thread.currentThread().interrupt();
            }
            throw Throwables.propagate(e);
        }
    }

    private Table loadTable(final HiveTableName hiveTableName)
            throws Exception
    {
        try {
            return retry()
                    .stopOn(NoSuchObjectException.class, HiveViewNotSupportedException.class)
                    .stopOnIllegalExceptions()
                    .run("getTable", stats.getGetTable().wrap(() -> {
                        try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) {
                            Table table = client.get_table(hiveTableName.getDatabaseName(), hiveTableName.getTableName());
                            // Hive views that were not created by Presto cannot be queried.
                            if (table.getTableType().equals(TableType.VIRTUAL_VIEW.name()) && (!isPrestoView(table))) {
                                throw new HiveViewNotSupportedException(new SchemaTableName(hiveTableName.getDatabaseName(), hiveTableName.getTableName()));
                            }
                            return table;
                        }
                    }));
        }
        catch (NoSuchObjectException | HiveViewNotSupportedException e) {
            throw e;
        }
        catch (TException e) {
            throw new PrestoException(HIVE_METASTORE_ERROR, e);
        }
    }

    @Override
    public List<String> getPartitionNames(String databaseName, String tableName)
            throws NoSuchObjectException
    {
        return get(partitionNamesCache, HiveTableName.table(databaseName, tableName), NoSuchObjectException.class);
    }

    private List<String> loadPartitionNames(final HiveTableName hiveTableName)
            throws Exception
    {
        try {
            return retry()
                    .stopOn(NoSuchObjectException.class)
                    .stopOnIllegalExceptions()
                    .run("getPartitionNames", stats.getGetPartitionNames().wrap(() -> {
                        try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) {
                            return client.get_partition_names(hiveTableName.getDatabaseName(), hiveTableName.getTableName(), (short) 0);
                        }
                    }));
        }
        catch (NoSuchObjectException e) {
            throw e;
        }
        catch (TException e) {
            throw new PrestoException(HIVE_METASTORE_ERROR, e);
        }
    }

    @Override
    public List<String> getPartitionNamesByParts(String databaseName, String tableName, List<String> parts)
            throws NoSuchObjectException
    {
        return get(partitionFilterCache, PartitionFilter.partitionFilter(databaseName, tableName, parts), NoSuchObjectException.class);
    }

    private List<String> loadPartitionNamesByParts(final PartitionFilter partitionFilter)
            throws Exception
    {
        try {
            return retry()
                    .stopOn(NoSuchObjectException.class)
                    .stopOnIllegalExceptions()
                    .run("getPartitionNamesByParts", stats.getGetPartitionNamesPs().wrap(() -> {
                        try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) {
                            // -1 means no limit on the number of returned names.
                            return client.get_partition_names_ps(partitionFilter.getHiveTableName().getDatabaseName(),
                                    partitionFilter.getHiveTableName().getTableName(),
                                    partitionFilter.getParts(),
                                    (short) -1);
                        }
                    }));
        }
        catch (NoSuchObjectException e) {
            throw e;
        }
        catch (TException e) {
            throw new PrestoException(HIVE_METASTORE_ERROR, e);
        }
    }

    /**
     * Bulk partition lookup keyed by partition name; delegates to the
     * partition cache's loadAll for batched metastore access.
     */
    public Map<String, Partition> getPartitionsByNames(String databaseName, String tableName, List<String> partitionNames)
            throws NoSuchObjectException
    {
        Iterable<HivePartitionName> names = transform(partitionNames, name -> HivePartitionName.partition(databaseName, tableName, name));

        ImmutableMap.Builder<String, Partition> partitionsByName = ImmutableMap.builder();
        Map<HivePartitionName, Partition> all = getAll(partitionCache, names, NoSuchObjectException.class);
        for (Entry<HivePartitionName, Partition> entry : all.entrySet()) {
            partitionsByName.put(entry.getKey().getPartitionName(), entry.getValue());
        }
        return partitionsByName.build();
    }

    private Partition loadPartitionByName(final HivePartitionName partitionName)
            throws Exception
    {
        checkNotNull(partitionName, "partitionName is null");
        try {
            return retry()
                    .stopOn(NoSuchObjectException.class)
                    .stopOnIllegalExceptions()
                    .run("getPartitionsByNames", stats.getGetPartitionByName().wrap(() -> {
                        try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) {
                            return client.get_partition_by_name(partitionName.getHiveTableName().getDatabaseName(),
                                    partitionName.getHiveTableName().getTableName(),
                                    partitionName.getPartitionName());
                        }
                    }));
        }
        catch (NoSuchObjectException e) {
            throw e;
        }
        catch (TException e) {
            throw new PrestoException(HIVE_METASTORE_ERROR, e);
        }
    }

    /**
     * Batched loader backing the partition cache. All requested partitions
     * must belong to the same table (enforced below); the partition column
     * names are derived from the first partition's name.
     */
    private Map<HivePartitionName, Partition> loadPartitionsByNames(Iterable<? extends HivePartitionName> partitionNames)
            throws Exception
    {
        checkNotNull(partitionNames, "partitionNames is null");
        checkArgument(!Iterables.isEmpty(partitionNames), "partitionNames is empty");

        HivePartitionName firstPartition = Iterables.get(partitionNames, 0);

        HiveTableName hiveTableName = firstPartition.getHiveTableName();
        final String databaseName = hiveTableName.getDatabaseName();
        final String tableName = hiveTableName.getTableName();

        final List<String> partitionsToFetch = new ArrayList<>();
        for (HivePartitionName partitionName : partitionNames) {
            checkArgument(partitionName.getHiveTableName().equals(hiveTableName), "Expected table name %s but got %s", hiveTableName, partitionName.getHiveTableName());
            partitionsToFetch.add(partitionName.getPartitionName());
        }

        final List<String> partitionColumnNames = ImmutableList.copyOf(Warehouse.makeSpecFromName(firstPartition.getPartitionName()).keySet());

        try {
            return retry()
                    .stopOn(NoSuchObjectException.class)
                    .stopOnIllegalExceptions()
                    .run("getPartitionsByNames", stats.getGetPartitionsByNames().wrap(() -> {
                        try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) {
                            ImmutableMap.Builder<HivePartitionName, Partition> partitions = ImmutableMap.builder();
                            for (Partition partition : client.get_partitions_by_names(databaseName, tableName, partitionsToFetch)) {
                                // Rebuild the canonical partition name from the returned
                                // values so the cache key matches the requested key.
                                String partitionId = FileUtils.makePartName(partitionColumnNames, partition.getValues(), null);
                                partitions.put(HivePartitionName.partition(databaseName, tableName, partitionId), partition);
                            }
                            return partitions.build();
                        }
                    }));
        }
        catch (NoSuchObjectException e) {
            throw e;
        }
        catch (TException e) {
            throw new PrestoException(HIVE_METASTORE_ERROR, e);
        }
    }

    /**
     * Immutable (database, table) cache key.
     */
    private static class HiveTableName
    {
        private final String databaseName;
        private final String tableName;

        private HiveTableName(String databaseName, String tableName)
        {
            this.databaseName = databaseName;
            this.tableName = tableName;
        }

        public static HiveTableName table(String databaseName, String tableName)
        {
            return new HiveTableName(databaseName, tableName);
        }

        public String getDatabaseName()
        {
            return databaseName;
        }

        public String getTableName()
        {
            return tableName;
        }

        @Override
        public String toString()
        {
            return toStringHelper(this)
                    .add("databaseName", databaseName)
                    .add("tableName", tableName)
                    .toString();
        }

        @Override
        public boolean equals(Object o)
        {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            HiveTableName other = (HiveTableName) o;
            return Objects.equals(databaseName, other.databaseName) &&
                    Objects.equals(tableName, other.tableName);
        }

        @Override
        public int hashCode()
        {
            return Objects.hash(databaseName, tableName);
        }
    }

    /**
     * Immutable (table, partition-name) cache key.
     */
    private static class HivePartitionName
    {
        private final HiveTableName hiveTableName;
        private final String partitionName;

        private HivePartitionName(HiveTableName hiveTableName, String partitionName)
        {
            this.hiveTableName = hiveTableName;
            this.partitionName = partitionName;
        }

        public static HivePartitionName partition(String databaseName, String tableName, String partitionName)
        {
            return new HivePartitionName(HiveTableName.table(databaseName, tableName), partitionName);
        }

        public HiveTableName getHiveTableName()
        {
            return hiveTableName;
        }

        public String getPartitionName()
        {
            return partitionName;
        }

        @Override
        public String toString()
        {
            return toStringHelper(this)
                    .add("hiveTableName", hiveTableName)
                    .add("partitionName", partitionName)
                    .toString();
        }

        @Override
        public boolean equals(Object o)
        {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            HivePartitionName other = (HivePartitionName) o;
            return Objects.equals(hiveTableName, other.hiveTableName) &&
                    Objects.equals(partitionName, other.partitionName);
        }

        @Override
        public int hashCode()
        {
            return Objects.hash(hiveTableName, partitionName);
        }
    }

    /**
     * Immutable (table, partial partition values) cache key; the parts list
     * is defensively copied.
     */
    private static class PartitionFilter
    {
        private final HiveTableName hiveTableName;
        private final List<String> parts;

        private PartitionFilter(HiveTableName hiveTableName, List<String> parts)
        {
            this.hiveTableName = hiveTableName;
            this.parts = ImmutableList.copyOf(parts);
        }

        public static PartitionFilter partitionFilter(String databaseName, String tableName, List<String> parts)
        {
            return new PartitionFilter(HiveTableName.table(databaseName, tableName), parts);
        }

        public HiveTableName getHiveTableName()
        {
            return hiveTableName;
        }

        public List<String> getParts()
        {
            return parts;
        }

        @Override
        public String toString()
        {
            return toStringHelper(this)
                    .add("hiveTableName", hiveTableName)
                    .add("parts", parts)
                    .toString();
        }

        @Override
        public boolean equals(Object o)
        {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            PartitionFilter other = (PartitionFilter) o;
            return Objects.equals(hiveTableName, other.hiveTableName) &&
                    Objects.equals(parts, other.parts);
        }

        @Override
        public int hashCode()
        {
            return Objects.hash(hiveTableName, parts);
        }
    }
}
| |
/*******************************************************************************
* Copyright (c) 2011, Daniel Murphy
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
******************************************************************************/
/*
* JBox2D - A Java Port of Erin Catto's Box2D
*
* JBox2D homepage: http://jbox2d.sourceforge.net/
* Box2D homepage: http://www.box2d.org
*
* This software is provided 'as-is', without any express or implied
* warranty. In no event will the authors be held liable for any damages
* arising from the use of this software.
*
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely, subject to the following restrictions:
*
* 1. The origin of this software must not be misrepresented; you must not
* claim that you wrote the original software. If you use this software
* in a product, an acknowledgment in the product documentation would be
* appreciated but is not required.
* 2. Altered source versions must be plainly marked as such, and must not be
* misrepresented as being the original software.
* 3. This notice may not be removed or altered from any source distribution.
*/
package org.jbox2d.dynamics.joints;
import org.jbox2d.common.Mat22;
import org.jbox2d.common.Mat33;
import org.jbox2d.common.MathUtils;
import org.jbox2d.common.Settings;
import org.jbox2d.common.Vec2;
import org.jbox2d.common.Vec3;
import org.jbox2d.dynamics.Body;
import org.jbox2d.dynamics.TimeStep;
import org.jbox2d.pooling.IWorldPool;
//Point-to-point constraint
//C = p2 - p1
//Cdot = v2 - v1
// = v2 + cross(w2, r2) - v1 - cross(w1, r1)
//J = [-I -r1_skew I r2_skew ]
//Identity used:
//w k % (rx i + ry j) = w * (-ry i + rx j)
//Motor constraint
//Cdot = w2 - w1
//J = [0 0 -1 0 0 1]
//K = invI1 + invI2
/**
* A revolute joint constrains two bodies to share a common point while they
* are free to rotate about the point. The relative rotation about the shared
* point is the joint angle. You can limit the relative rotation with
* a joint limit that specifies a lower and upper angle. You can use a motor
* to drive the relative rotation about the shared point. A maximum motor torque
* is provided so that infinite forces are not generated.
*
* @author Daniel Murphy
*/
public class RevoluteJoint extends Joint {

    /** Anchor on body A, in body A's local frame. */
    public final Vec2 m_localAnchor1 = new Vec2(); // relative
    /** Anchor on body B, in body B's local frame. */
    public final Vec2 m_localAnchor2 = new Vec2();
    /** Accumulated impulse: (x, y) = point-to-point, z = limit angular impulse. */
    public final Vec3 m_impulse = new Vec3();
    /** Accumulated motor impulse (angular). */
    public float m_motorImpulse;
    /** Effective mass for the point-to-point constraint. */
    public final Mat33 m_mass = new Mat33(); // effective mass for point-to-point constraint.
    public float m_motorMass; // effective mass for motor/limit angular constraint.
    public boolean m_enableMotor;
    public float m_maxMotorTorque;
    public float m_motorSpeed;
    public boolean m_enableLimit;
    /** Joint angle is measured relative to this reference (b2.a - b1.a - ref). */
    public float m_referenceAngle;
    public float m_lowerAngle;
    public float m_upperAngle;
    /** Which limit (if any) is currently active; set each step in initVelocityConstraints. */
    public LimitState m_limitState;

    /** Copies all configuration out of the joint definition. */
    public RevoluteJoint(IWorldPool argWorld, RevoluteJointDef def) {
        super(argWorld, def);
        m_localAnchor1.set(def.localAnchorA);
        m_localAnchor2.set(def.localAnchorB);
        m_referenceAngle = def.referenceAngle;
        m_motorImpulse = 0;
        m_lowerAngle = def.lowerAngle;
        m_upperAngle = def.upperAngle;
        m_maxMotorTorque = def.maxMotorTorque;
        m_motorSpeed = def.motorSpeed;
        m_enableLimit = def.enableLimit;
        m_enableMotor = def.enableMotor;
    }

    /**
     * Computes effective masses, decides the limit state, and (when warm
     * starting) re-applies last step's scaled impulses to both bodies.
     * Pooled vectors are popped/pushed in matched pairs.
     */
    @Override
    public void initVelocityConstraints(final TimeStep step) {
        final Body b1 = m_bodyA;
        final Body b2 = m_bodyB;
        if (m_enableMotor || m_enableLimit) {
            // You cannot create a rotation limit between bodies that
            // both have fixed rotation.
            assert (b1.m_invI > 0.0f || b2.m_invI > 0.0f);
        }
        final Vec2 r1 = pool.popVec2();
        final Vec2 r2 = pool.popVec2();
        // Compute the effective mass matrix.
        // r1/r2: world-space vectors from each body's center of mass to its anchor.
        r1.set(m_localAnchor1).subLocal(b1.getLocalCenter());
        r2.set(m_localAnchor2).subLocal(b2.getLocalCenter());
        Mat22.mulToOut(b1.getTransform().R, r1, r1);
        Mat22.mulToOut(b2.getTransform().R, r2, r2);
        // J = [-I -r1_skew I r2_skew]
        // [ 0 -1 0 1]
        // r_skew = [-ry; rx]
        // Matlab
        // K = [ m1+r1y^2*i1+m2+r2y^2*i2, -r1y*i1*r1x-r2y*i2*r2x, -r1y*i1-r2y*i2]
        // [ -r1y*i1*r1x-r2y*i2*r2x, m1+r1x^2*i1+m2+r2x^2*i2, r1x*i1+r2x*i2]
        // [ -r1y*i1-r2y*i2, r1x*i1+r2x*i2, i1+i2]
        // K = [(1/m1 + 1/m2) * eye(2) - skew(r1) * invI1 * skew(r1) - skew(r2) * invI2 *
        // skew(r2)]
        // = [1/m1+1/m2 0 ] + invI1 * [r1.y*r1.y -r1.x*r1.y] + invI2 * [r1.y*r1.y
        // -r1.x*r1.y]
        // [ 0 1/m1+1/m2] [-r1.x*r1.y r1.x*r1.x] [-r1.x*r1.y r1.x*r1.x]
        float m1 = b1.m_invMass, m2 = b2.m_invMass;
        float i1 = b1.m_invI, i2 = b2.m_invI;
        // m_mass is symmetric; columns 1/2 mirror rows via col1.y = col2.x etc.
        m_mass.col1.x = m1 + m2 + r1.y * r1.y * i1 + r2.y * r2.y * i2;
        m_mass.col2.x = -r1.y * r1.x * i1 - r2.y * r2.x * i2;
        m_mass.col3.x = -r1.y * i1 - r2.y * i2;
        m_mass.col1.y = m_mass.col2.x;
        m_mass.col2.y = m1 + m2 + r1.x * r1.x * i1 + r2.x * r2.x * i2;
        m_mass.col3.y = r1.x * i1 + r2.x * i2;
        m_mass.col1.z = m_mass.col3.x;
        m_mass.col2.z = m_mass.col3.y;
        m_mass.col3.z = i1 + i2;
        // Angular effective mass (motor/limit); guarded inversion for two
        // fixed-rotation bodies.
        m_motorMass = i1 + i2;
        if (m_motorMass > 0.0f) {
            m_motorMass = 1.0f / m_motorMass;
        }
        if (m_enableMotor == false) {
            m_motorImpulse = 0.0f;
        }
        // System.out.printf("joint angle: %f, %f, and %f\n", b2.m_sweep.a, b1.m_sweep.a, m_referenceAngle);
        if (m_enableLimit) {
            float jointAngle = b2.m_sweep.a - b1.m_sweep.a - m_referenceAngle;
            if (MathUtils.abs(m_upperAngle - m_lowerAngle) < 2.0f * Settings.angularSlop) {
                // Lower and upper limits are (nearly) equal: treat as a fixed angle.
                m_limitState = LimitState.EQUAL;
            }
            else if (jointAngle <= m_lowerAngle) {
                // Reset accumulated limit impulse when the active limit changes.
                if (m_limitState != LimitState.AT_LOWER) {
                    m_impulse.z = 0.0f;
                }
                m_limitState = LimitState.AT_LOWER;
            }
            else if (jointAngle >= m_upperAngle) {
                if (m_limitState != LimitState.AT_UPPER) {
                    m_impulse.z = 0.0f;
                }
                m_limitState = LimitState.AT_UPPER;
            }
            else {
                m_limitState = LimitState.INACTIVE;
                m_impulse.z = 0.0f;
            }
        }
        else {
            m_limitState = LimitState.INACTIVE;
        }
        // System.out.printf("limit state: %s\n", m_limitState.toString());
        if (step.warmStarting) {
            // Scale impulses to support a variable time step.
            m_impulse.mulLocal(step.dtRatio);
            m_motorImpulse *= step.dtRatio;
            Vec2 temp = pool.popVec2();
            Vec2 P = pool.popVec2();
            P.set(m_impulse.x, m_impulse.y);
            // Apply last step's impulses to the bodies' velocities.
            temp.set(P).mulLocal(m1);
            b1.m_linearVelocity.subLocal(temp);
            b1.m_angularVelocity -= i1 * (Vec2.cross(r1, P) + m_motorImpulse + m_impulse.z);
            temp.set(P).mulLocal(m2);
            b2.m_linearVelocity.addLocal(temp);
            b2.m_angularVelocity += i2 * (Vec2.cross(r2, P) + m_motorImpulse + m_impulse.z);
            pool.pushVec2(2);
        }
        else {
            m_impulse.setZero();
            m_motorImpulse = 0.0f;
        }
        pool.pushVec2(2);
    }

    /**
     * Velocity solver: motor first (clamped accumulated impulse), then either
     * a combined 3x3 point-to-point + limit solve (block solve with clamping
     * of the angular component) or a plain 2x2 point-to-point solve.
     */
    @Override
    public void solveVelocityConstraints(final TimeStep step) {
        final Body b1 = m_bodyA;
        final Body b2 = m_bodyB;
        final Vec2 v1 = b1.m_linearVelocity;
        float w1 = b1.m_angularVelocity;
        final Vec2 v2 = b2.m_linearVelocity;
        float w2 = b2.m_angularVelocity;
        float m1 = b1.m_invMass, m2 = b2.m_invMass;
        float i1 = b1.m_invI, i2 = b2.m_invI;
        // Solve motor constraint.
        if (m_enableMotor && m_limitState != LimitState.EQUAL) {
            float Cdot = w2 - w1 - m_motorSpeed;
            float impulse = m_motorMass * (-Cdot);
            float oldImpulse = m_motorImpulse;
            // Clamp the ACCUMULATED impulse, then apply only the delta.
            float maxImpulse = step.dt * m_maxMotorTorque;
            m_motorImpulse = MathUtils.clamp(m_motorImpulse + impulse, -maxImpulse, maxImpulse);
            impulse = m_motorImpulse - oldImpulse;
            w1 -= i1 * impulse;
            w2 += i2 * impulse;
        }
        final Vec2 temp = pool.popVec2();
        final Vec2 r1 = pool.popVec2();
        final Vec2 r2 = pool.popVec2();
        // Solve limit constraint.
        if (m_enableLimit && m_limitState != LimitState.INACTIVE) {
            r1.set(m_localAnchor1).subLocal(b1.getLocalCenter());
            r2.set(m_localAnchor2).subLocal(b2.getLocalCenter());
            Mat22.mulToOut(b1.getTransform().R, r1, r1);
            Mat22.mulToOut(b2.getTransform().R, r2, r2);
            // Vec2 r1 = b2Mul(b1.getTransform().R, m_localAnchor1 - b1.getLocalCenter());
            // Vec2 r2 = b2Mul(b2.getTransform().R, m_localAnchor2 - b2.getLocalCenter());
            final Vec2 Cdot1 = pool.popVec2();
            final Vec3 Cdot = pool.popVec3();
            // Solve point-to-point constraint
            Vec2.crossToOut(w1, r1, temp);
            Vec2.crossToOut(w2, r2, Cdot1);
            Cdot1.addLocal(v2).subLocal(v1).subLocal(temp);
            float Cdot2 = w2 - w1;
            Cdot.set(Cdot1.x, Cdot1.y, Cdot2);
            // Vec2 Cdot1 = v2 + b2Cross(w2, r2) - v1 - b2Cross(w1, r1);
            // float Cdot2 = w2 - w1;
            // b2Vec3 Cdot(Cdot1.x, Cdot1.y, Cdot2);
            Vec3 impulse = pool.popVec3();
            m_mass.solve33ToOut(Cdot.negateLocal(), impulse);
            // Cdot.negateLocal(); just leave negated, we don't use later
            if (m_limitState == LimitState.EQUAL) {
                m_impulse.addLocal(impulse);
            }
            else if (m_limitState == LimitState.AT_LOWER) {
                float newImpulse = m_impulse.z + impulse.z;
                if (newImpulse < 0.0f) {
                    // Limit impulse would go negative: drop the angular part and
                    // re-solve the 2x2 point-to-point system alone.
                    m_mass.solve22ToOut(Cdot1.negateLocal(), temp);
                    //Cdot1.negateLocal(); just leave negated, we don't use it again
                    impulse.x = temp.x;
                    impulse.y = temp.y;
                    impulse.z = -m_impulse.z;
                    m_impulse.x += temp.x;
                    m_impulse.y += temp.y;
                    m_impulse.z = 0.0f;
                }
            }
            else if (m_limitState == LimitState.AT_UPPER) {
                float newImpulse = m_impulse.z + impulse.z;
                if (newImpulse > 0.0f) {
                    // Mirror of the AT_LOWER clamp for the upper limit.
                    m_mass.solve22ToOut(Cdot1.negateLocal(), temp);
                    //Cdot1.negateLocal(); just leave negated, we don't use it again
                    impulse.x = temp.x;
                    impulse.y = temp.y;
                    impulse.z = -m_impulse.z;
                    m_impulse.x += temp.x;
                    m_impulse.y += temp.y;
                    m_impulse.z = 0.0f;
                }
            }
            final Vec2 P = pool.popVec2();
            P.set(impulse.x, impulse.y);
            temp.set(P).mulLocal(m1);
            v1.subLocal(temp);
            w1 -= i1 * (Vec2.cross(r1, P) + impulse.z);
            temp.set(P).mulLocal(m2);
            v2.addLocal(temp);
            w2 += i2 * (Vec2.cross(r2, P) + impulse.z);
            pool.pushVec2(2);
            pool.pushVec3(2);
        }
        else {
            r1.set(m_localAnchor1).subLocal(b1.getLocalCenter());
            r2.set(m_localAnchor2).subLocal(b2.getLocalCenter());
            Mat22.mulToOut(b1.getTransform().R, r1, r1);
            Mat22.mulToOut(b2.getTransform().R, r2, r2);
            // Vec2 r1 = b2Mul(b1.getTransform().R, m_localAnchor1 - b1.getLocalCenter());
            // Vec2 r2 = b2Mul(b2.getTransform().R, m_localAnchor2 - b2.getLocalCenter());
            // Solve point-to-point constraint
            Vec2 Cdot = pool.popVec2();
            Vec2 impulse = pool.popVec2();
            Vec2.crossToOut(w1, r1, temp);
            Vec2.crossToOut(w2, r2, Cdot);
            Cdot.addLocal(v2).subLocal(v1).subLocal(temp);
            m_mass.solve22ToOut(Cdot.negateLocal(), impulse); // just leave negated
            m_impulse.x += impulse.x;
            m_impulse.y += impulse.y;
            temp.set(impulse).mulLocal(m1);
            v1.subLocal(temp);
            w1 -= i1 * Vec2.cross(r1, impulse);
            temp.set(impulse).mulLocal(m2);
            v2.addLocal(temp);
            w2 += i2 * Vec2.cross(r2, impulse);
            pool.pushVec2(2);
        }
        // v1/v2 were mutated in place; only angular velocities need writing back.
        b1.m_angularVelocity = w1;
        b2.m_angularVelocity = w2;
        pool.pushVec2(3);
    }

    /**
     * Position solver: corrects angular limit violation, then positional drift
     * of the shared anchor point.
     *
     * @return true when both errors are within the engine's slop tolerances
     */
    @Override
    public boolean solvePositionConstraints(float baumgarte) {
        final Body b1 = m_bodyA;
        final Body b2 = m_bodyB;
        // TODO_ERIN block solve with limit.
        float angularError = 0.0f;
        float positionError = 0.0f;
        // Solve angular limit constraint.
        if (m_enableLimit && m_limitState != LimitState.INACTIVE) {
            float angle = b2.m_sweep.a - b1.m_sweep.a - m_referenceAngle;
            float limitImpulse = 0.0f;
            if (m_limitState == LimitState.EQUAL) {
                // Prevent large angular corrections
                float C = MathUtils.clamp(angle - m_lowerAngle, -Settings.maxAngularCorrection,
                        Settings.maxAngularCorrection);
                limitImpulse = -m_motorMass * C;
                angularError = MathUtils.abs(C);
            }
            else if (m_limitState == LimitState.AT_LOWER) {
                float C = angle - m_lowerAngle;
                angularError = -C;
                // Prevent large angular corrections and allow some slop.
                C = MathUtils.clamp(C + Settings.angularSlop, -Settings.maxAngularCorrection, 0.0f);
                limitImpulse = -m_motorMass * C;
            }
            else if (m_limitState == LimitState.AT_UPPER) {
                float C = angle - m_upperAngle;
                angularError = C;
                // Prevent large angular corrections and allow some slop.
                C = MathUtils.clamp(C - Settings.angularSlop, 0.0f, Settings.maxAngularCorrection);
                limitImpulse = -m_motorMass * C;
            }
            b1.m_sweep.a -= b1.m_invI * limitImpulse;
            b2.m_sweep.a += b2.m_invI * limitImpulse;
            b1.synchronizeTransform();
            b2.synchronizeTransform();
        }
        // Solve point-to-point constraint.
        {
            Vec2 impulse = pool.popVec2();
            Vec2 r1 = pool.popVec2();
            Vec2 r2 = pool.popVec2();
            Vec2 C = pool.popVec2();
            r1.set(m_localAnchor1).subLocal(b1.getLocalCenter());
            r2.set(m_localAnchor2).subLocal(b2.getLocalCenter());
            Mat22.mulToOut(b1.getTransform().R, r1, r1);
            Mat22.mulToOut(b2.getTransform().R, r2, r2);
            // C = world-space separation of the two anchor points.
            C.set(b2.m_sweep.c).addLocal(r2).subLocal(b1.m_sweep.c).subLocal(r1);
            positionError = C.length();
            float invMass1 = b1.m_invMass, invMass2 = b2.m_invMass;
            float invI1 = b1.m_invI, invI2 = b2.m_invI;
            // Handle large detachment.
            final float k_allowedStretch = 10.0f * Settings.linearSlop;
            if (C.lengthSquared() > k_allowedStretch * k_allowedStretch) {
                Vec2 u = pool.popVec2();
                // Use a particle solution (no rotation).
                // u.set(C);
                // u.normalize(); ?? we don't even use this
                float m = invMass1 + invMass2;
                if (m > 0.0f) {
                    m = 1.0f / m;
                }
                impulse.set(C).negateLocal().mulLocal(m);
                final float k_beta = 0.5f;
                // using u as temp variable
                u.set(impulse).mulLocal(k_beta * invMass1);
                b1.m_sweep.c.subLocal(u);
                u.set(impulse).mulLocal(k_beta * invMass2);
                b2.m_sweep.c.addLocal(u);
                // Recompute C after the partial correction.
                C.set(b2.m_sweep.c).addLocal(r2).subLocal(b1.m_sweep.c).subLocal(r1);
                pool.pushVec2(1);
            }
            // K1 + K2 + K3 assembles the 2x2 effective-mass matrix from the
            // linear part (K1) and each body's rotational part (K2, K3).
            Mat22 K1 = pool.popMat22();
            K1.col1.x = invMass1 + invMass2;
            K1.col2.x = 0.0f;
            K1.col1.y = 0.0f;
            K1.col2.y = invMass1 + invMass2;
            Mat22 K2 = pool.popMat22();
            K2.col1.x = invI1 * r1.y * r1.y;
            K2.col2.x = -invI1 * r1.x * r1.y;
            K2.col1.y = -invI1 * r1.x * r1.y;
            K2.col2.y = invI1 * r1.x * r1.x;
            Mat22 K3 = pool.popMat22();
            K3.col1.x = invI2 * r2.y * r2.y;
            K3.col2.x = -invI2 * r2.x * r2.y;
            K3.col1.y = -invI2 * r2.x * r2.y;
            K3.col2.y = invI2 * r2.x * r2.x;
            K1.addLocal(K2).addLocal(K3);
            K1.solveToOut(C.negateLocal(), impulse); // just leave c negated
            // using C as temp variable
            C.set(impulse).mulLocal(b1.m_invMass);
            b1.m_sweep.c.subLocal(C);
            b1.m_sweep.a -= b1.m_invI * Vec2.cross(r1, impulse);
            C.set(impulse).mulLocal(b2.m_invMass);
            b2.m_sweep.c.addLocal(C);
            b2.m_sweep.a += b2.m_invI * Vec2.cross(r2, impulse);
            b1.synchronizeTransform();
            b2.synchronizeTransform();
            pool.pushMat22(3);
            pool.pushVec2(4);
        }
        return positionError <= Settings.linearSlop && angularError <= Settings.angularSlop;
    }

    /** Writes body A's anchor in world coordinates into argOut. */
    @Override
    public void getAnchorA(Vec2 argOut) {
        m_bodyA.getWorldPointToOut(m_localAnchor1, argOut);
    }

    /** Writes body B's anchor in world coordinates into argOut. */
    @Override
    public void getAnchorB(Vec2 argOut) {
        m_bodyB.getWorldPointToOut(m_localAnchor2, argOut);
    }

    /** Reaction force = accumulated point-to-point impulse / dt. */
    @Override
    public void getReactionForce(float inv_dt, Vec2 argOut) {
        argOut.set(m_impulse.x, m_impulse.y).mulLocal(inv_dt);
    }

    /** Reaction torque = accumulated limit impulse / dt. */
    @Override
    public float getReactionTorque(float inv_dt) {
        return inv_dt * m_impulse.z;
    }

    /** Current joint angle (body B relative to body A, minus the reference angle). */
    public float getJointAngle() {
        final Body b1 = m_bodyA;
        final Body b2 = m_bodyB;
        return b2.m_sweep.a - b1.m_sweep.a - m_referenceAngle;
    }

    /** Relative angular velocity of body B with respect to body A. */
    public float getJointSpeed() {
        final Body b1 = m_bodyA;
        final Body b2 = m_bodyB;
        return b2.m_angularVelocity - b1.m_angularVelocity;
    }

    public boolean isMotorEnabled() {
        return m_enableMotor;
    }

    /** Enables/disables the motor; wakes both bodies so the change takes effect. */
    public void enableMotor(boolean flag) {
        m_bodyA.setAwake(true);
        m_bodyB.setAwake(true);
        m_enableMotor = flag;
    }

    // NOTE(review): returns the accumulated motor IMPULSE, not a torque
    // (torque would be impulse * inv_dt) — this matches the upstream
    // JBox2D API of this version; confirm before "fixing".
    public float getMotorTorque() {
        return m_motorImpulse;
    }

    /** Sets the motor's target angular speed (radians/sec) and wakes both bodies. */
    public void setMotorSpeed(final float speed) {
        m_bodyA.setAwake(true);
        m_bodyB.setAwake(true);
        m_motorSpeed = speed;
    }

    /** Sets the maximum motor torque used to clamp the motor impulse. */
    public void setMaxMotorTorque(final float torque) {
        m_bodyA.setAwake(true);
        m_bodyB.setAwake(true);
        m_maxMotorTorque = torque;
    }

    public boolean isLimitEnabled() {
        return m_enableLimit;
    }

    /** Enables/disables the joint limit; wakes both bodies. */
    public void enableLimit(final boolean flag) {
        m_bodyA.setAwake(true);
        m_bodyB.setAwake(true);
        m_enableLimit = flag;
    }

    /** Lower joint limit, in radians. */
    public float getLowerLimit() {
        return m_lowerAngle;
    }

    /** Upper joint limit, in radians. */
    public float getUpperLimit() {
        return m_upperAngle;
    }

    /**
     * Sets both joint limits (radians); lower must not exceed upper.
     * Wakes both bodies.
     */
    public void setLimits(final float lower, final float upper) {
        assert (lower <= upper);
        m_bodyA.setAwake(true);
        m_bodyB.setAwake(true);
        m_lowerAngle = lower;
        m_upperAngle = upper;
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.index.search.ESToParentBlockJoinQuery;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.elasticsearch.test.VersionUtils;
import org.hamcrest.Matchers;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import static org.elasticsearch.index.query.InnerHitBuilderTests.randomInnerHits;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code NestedQueryBuilder}: random round-trip construction,
 * Lucene-query assertions, score-mode parsing, ignore_unmapped behavior, and
 * inner-hit extraction through wrapping compound queries.
 */
public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBuilder> {

    // Set when doCreateTestQueryBuilder wraps the inner query in a
    // WrapperQueryBuilder, so testMustRewrite knows a rewrite is expected.
    boolean requiresRewrite = false;

    /** Registers a "nested1" nested field plus the standard test fields. */
    @Override
    protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
        mapperService.merge("doc", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("doc",
                STRING_FIELD_NAME, "type=text",
                INT_FIELD_NAME, "type=integer",
                DOUBLE_FIELD_NAME, "type=double",
                BOOLEAN_FIELD_NAME, "type=boolean",
                DATE_FIELD_NAME, "type=date",
                OBJECT_FIELD_NAME, "type=object",
                GEO_POINT_FIELD_NAME, "type=geo_point",
                "nested1", "type=nested"
        ).string()), MapperService.MergeReason.MAPPING_UPDATE, false);
    }

    /**
     * @return a {@link NestedQueryBuilder} with random values all over the place
     */
    @Override
    protected NestedQueryBuilder doCreateTestQueryBuilder() {
        QueryBuilder innerQueryBuilder = RandomQueryBuilder.createQuery(random());
        if (randomBoolean()) {
            // Randomly force a rewrite path by wrapping the inner query.
            requiresRewrite = true;
            innerQueryBuilder = new WrapperQueryBuilder(innerQueryBuilder.toString());
        }
        NestedQueryBuilder nqb = new NestedQueryBuilder("nested1", innerQueryBuilder,
                RandomPicks.randomFrom(random(), ScoreMode.values()));
        nqb.ignoreUnmapped(randomBoolean());
        if (randomBoolean()) {
            nqb.innerHit(new InnerHitBuilder(randomAlphaOfLengthBetween(1, 10))
                    .setSize(randomIntBetween(0, 100))
                    .addSort(new FieldSortBuilder(INT_FIELD_NAME).order(SortOrder.ASC))
                    .setIgnoreUnmapped(nqb.ignoreUnmapped()));
        }
        return nqb;
    }

    /**
     * Verifies the built Lucene query type and, when inner hits were
     * configured, that they are registered on the search context with the
     * expected size and sort.
     */
    @Override
    protected void doAssertLuceneQuery(NestedQueryBuilder queryBuilder, Query query, SearchContext searchContext) throws IOException {
        assertThat(query, instanceOf(ESToParentBlockJoinQuery.class));
        // TODO how to assert this?
        if (queryBuilder.innerHit() != null) {
            // have to rewrite again because the provided queryBuilder hasn't been rewritten (directly returned from
            // doCreateTestQueryBuilder)
            queryBuilder = (NestedQueryBuilder) queryBuilder.rewrite(searchContext.getQueryShardContext());
            assertNotNull(searchContext);
            Map<String, InnerHitContextBuilder> innerHitInternals = new HashMap<>();
            InnerHitContextBuilder.extractInnerHits(queryBuilder, innerHitInternals);
            for (InnerHitContextBuilder builder : innerHitInternals.values()) {
                builder.build(searchContext, searchContext.innerHits());
            }
            assertNotNull(searchContext.innerHits());
            assertEquals(1, searchContext.innerHits().getInnerHits().size());
            assertTrue(searchContext.innerHits().getInnerHits().containsKey(queryBuilder.innerHit().getName()));
            InnerHitsContext.InnerHitSubContext innerHits = searchContext.innerHits().getInnerHits().get(queryBuilder.innerHit().getName());
            assertEquals(innerHits.size(), queryBuilder.innerHit().getSize());
            assertEquals(innerHits.sort().sort.getSort().length, 1);
            assertEquals(innerHits.sort().sort.getSort()[0].getField(), INT_FIELD_NAME);
        }
    }

    /**
     * Test (de)serialization on all previous released versions
     */
    public void testSerializationBWC() throws IOException {
        for (Version version : VersionUtils.allReleasedVersions()) {
            NestedQueryBuilder testQuery = createTestQueryBuilder();
            if (version.before(Version.V_5_2_0) && testQuery.innerHit() != null) {
                // ignore unmapped for inner_hits has been added on 5.2
                testQuery.innerHit().setIgnoreUnmapped(false);
            }
            assertSerialization(testQuery, version);
        }
    }

    /** Constructor argument validation: path, query, and score_mode are required. */
    public void testValidate() {
        QueryBuilder innerQuery = RandomQueryBuilder.createQuery(random());
        IllegalArgumentException e =
                expectThrows(IllegalArgumentException.class, () -> QueryBuilders.nestedQuery(null, innerQuery, ScoreMode.Avg));
        assertThat(e.getMessage(), equalTo("[nested] requires 'path' field"));
        e = expectThrows(IllegalArgumentException.class, () -> QueryBuilders.nestedQuery("foo", null, ScoreMode.Avg));
        assertThat(e.getMessage(), equalTo("[nested] requires 'query' field"));
        e = expectThrows(IllegalArgumentException.class, () -> QueryBuilders.nestedQuery("foo", innerQuery, null));
        assertThat(e.getMessage(), equalTo("[nested] requires 'score_mode' field"));
    }

    /** Round-trips a hand-written nested query JSON and checks the parsed score mode. */
    public void testFromJson() throws IOException {
        String json =
                "{\n" +
                "  \"nested\" : {\n" +
                "    \"query\" : {\n" +
                "      \"bool\" : {\n" +
                "        \"must\" : [ {\n" +
                "          \"match\" : {\n" +
                "            \"obj1.name\" : {\n" +
                "              \"query\" : \"blue\",\n" +
                "              \"operator\" : \"OR\",\n" +
                "              \"prefix_length\" : 0,\n" +
                "              \"max_expansions\" : 50,\n" +
                "              \"fuzzy_transpositions\" : true,\n" +
                "              \"lenient\" : false,\n" +
                "              \"zero_terms_query\" : \"NONE\",\n" +
                "              \"boost\" : 1.0\n" +
                "            }\n" +
                "          }\n" +
                "        }, {\n" +
                "          \"range\" : {\n" +
                "            \"obj1.count\" : {\n" +
                "              \"from\" : 5,\n" +
                "              \"to\" : null,\n" +
                "              \"include_lower\" : false,\n" +
                "              \"include_upper\" : true,\n" +
                "              \"boost\" : 1.0\n" +
                "            }\n" +
                "          }\n" +
                "        } ],\n" +
                "        \"adjust_pure_negative\" : true,\n" +
                "        \"boost\" : 1.0\n" +
                "      }\n" +
                "    },\n" +
                "    \"path\" : \"obj1\",\n" +
                "    \"ignore_unmapped\" : false,\n" +
                "    \"score_mode\" : \"avg\",\n" +
                "    \"boost\" : 1.0\n" +
                "  }\n" +
                "}";
        NestedQueryBuilder parsed = (NestedQueryBuilder) parseQuery(json);
        checkGeneratedJson(json, parsed);
        assertEquals(json, ScoreMode.Avg, parsed.scoreMode());
    }

    /**
     * The base-class rewrite check throws for wrapper queries; that is only
     * acceptable when this run actually created a wrapper (requiresRewrite).
     */
    @Override
    public void testMustRewrite() throws IOException {
        try {
            super.testMustRewrite();
        } catch (UnsupportedOperationException e) {
            if (requiresRewrite == false) {
                throw e;
            }
        }
    }

    /** ignore_unmapped=true yields MatchNoDocs; false throws IllegalStateException. */
    public void testIgnoreUnmapped() throws IOException {
        final NestedQueryBuilder queryBuilder = new NestedQueryBuilder("unmapped", new MatchAllQueryBuilder(), ScoreMode.None);
        queryBuilder.ignoreUnmapped(true);
        Query query = queryBuilder.toQuery(createShardContext());
        assertThat(query, notNullValue());
        assertThat(query, instanceOf(MatchNoDocsQuery.class));
        final NestedQueryBuilder failingQueryBuilder = new NestedQueryBuilder("unmapped", new MatchAllQueryBuilder(), ScoreMode.None);
        failingQueryBuilder.ignoreUnmapped(false);
        IllegalStateException e = expectThrows(IllegalStateException.class, () -> failingQueryBuilder.toQuery(createShardContext()));
        assertThat(e.getMessage(), containsString("[" + NestedQueryBuilder.NAME + "] failed to find nested object under path [unmapped]"));
    }

    /** Same as above, but through the rewrite path forced by WrapperQueryBuilder. */
    public void testIgnoreUnmappedWithRewrite() throws IOException {
        // WrapperQueryBuilder makes sure we always rewrite
        final NestedQueryBuilder queryBuilder =
                new NestedQueryBuilder("unmapped", new WrapperQueryBuilder(new MatchAllQueryBuilder().toString()), ScoreMode.None);
        queryBuilder.ignoreUnmapped(true);
        QueryShardContext queryShardContext = createShardContext();
        Query query = queryBuilder.rewrite(queryShardContext).toQuery(queryShardContext);
        assertThat(query, notNullValue());
        assertThat(query, instanceOf(MatchNoDocsQuery.class));
    }

    /** Score-mode string parsing/serialization: "min". */
    public void testMinFromString() {
        assertThat("fromString(min) != MIN", ScoreMode.Min, equalTo(NestedQueryBuilder.parseScoreMode("min")));
        assertThat("min", equalTo(NestedQueryBuilder.scoreModeAsString(ScoreMode.Min)));
    }

    /** Score-mode string parsing/serialization: "max". */
    public void testMaxFromString() {
        assertThat("fromString(max) != MAX", ScoreMode.Max, equalTo(NestedQueryBuilder.parseScoreMode("max")));
        assertThat("max", equalTo(NestedQueryBuilder.scoreModeAsString(ScoreMode.Max)));
    }

    /** Score-mode string parsing/serialization: "avg". */
    public void testAvgFromString() {
        assertThat("fromString(avg) != AVG", ScoreMode.Avg, equalTo(NestedQueryBuilder.parseScoreMode("avg")));
        assertThat("avg", equalTo(NestedQueryBuilder.scoreModeAsString(ScoreMode.Avg)));
    }

    /** Score-mode string parsing/serialization: "sum" maps to ScoreMode.Total. */
    public void testSumFromString() {
        assertThat("fromString(total) != SUM", ScoreMode.Total, equalTo(NestedQueryBuilder.parseScoreMode("sum")));
        assertThat("sum", equalTo(NestedQueryBuilder.scoreModeAsString(ScoreMode.Total)));
    }

    /** Score-mode string parsing/serialization: "none". */
    public void testNoneFromString() {
        assertThat("fromString(none) != NONE", ScoreMode.None, equalTo(NestedQueryBuilder.parseScoreMode("none")));
        assertThat("none", equalTo(NestedQueryBuilder.scoreModeAsString(ScoreMode.None)));
    }

    /**
     * Should throw {@link IllegalArgumentException} instead of NPE.
     */
    public void testThatNullFromStringThrowsException() {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> NestedQueryBuilder.parseScoreMode(null));
        assertEquals("No score mode for child query [null] found", e.getMessage());
    }

    /**
     * Failure should not change (and the value should never match anything...).
     */
    public void testThatUnrecognizedFromStringThrowsException() {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> NestedQueryBuilder.parseScoreMode("unrecognized value"));
        assertEquals("No score mode for child query [unrecognized value] found", e.getMessage());
    }

    /** Inner hits configured directly on the nested query are extracted. */
    public void testInlineLeafInnerHitsNestedQuery() throws Exception {
        InnerHitBuilder leafInnerHits = randomInnerHits();
        NestedQueryBuilder nestedQueryBuilder = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None);
        nestedQueryBuilder.innerHit(leafInnerHits);
        Map<String, InnerHitContextBuilder> innerHitBuilders = new HashMap<>();
        nestedQueryBuilder.extractInnerHitBuilders(innerHitBuilders);
        assertThat(innerHitBuilders.get(leafInnerHits.getName()), Matchers.notNullValue());
    }

    /** Inner hits are still extracted when the nested query is wrapped in a bool. */
    public void testInlineLeafInnerHitsNestedQueryViaBoolQuery() {
        InnerHitBuilder leafInnerHits = randomInnerHits();
        NestedQueryBuilder nestedQueryBuilder = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None)
                .innerHit(leafInnerHits);
        BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder().should(nestedQueryBuilder);
        Map<String, InnerHitContextBuilder> innerHitBuilders = new HashMap<>();
        boolQueryBuilder.extractInnerHitBuilders(innerHitBuilders);
        assertThat(innerHitBuilders.get(leafInnerHits.getName()), Matchers.notNullValue());
    }

    /** Extraction through a constant_score wrapper. */
    public void testInlineLeafInnerHitsNestedQueryViaConstantScoreQuery() {
        InnerHitBuilder leafInnerHits = randomInnerHits();
        NestedQueryBuilder nestedQueryBuilder = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None)
                .innerHit(leafInnerHits);
        ConstantScoreQueryBuilder constantScoreQueryBuilder = new ConstantScoreQueryBuilder(nestedQueryBuilder);
        Map<String, InnerHitContextBuilder> innerHitBuilders = new HashMap<>();
        constantScoreQueryBuilder.extractInnerHitBuilders(innerHitBuilders);
        assertThat(innerHitBuilders.get(leafInnerHits.getName()), Matchers.notNullValue());
    }

    /** Extraction through a boosting wrapper picks up both positive and negative clauses. */
    public void testInlineLeafInnerHitsNestedQueryViaBoostingQuery() {
        InnerHitBuilder leafInnerHits1 = randomInnerHits();
        NestedQueryBuilder nestedQueryBuilder1 = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None)
                .innerHit(leafInnerHits1);
        InnerHitBuilder leafInnerHits2 = randomInnerHits();
        NestedQueryBuilder nestedQueryBuilder2 = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None)
                .innerHit(leafInnerHits2);
        BoostingQueryBuilder constantScoreQueryBuilder = new BoostingQueryBuilder(nestedQueryBuilder1, nestedQueryBuilder2);
        Map<String, InnerHitContextBuilder> innerHitBuilders = new HashMap<>();
        constantScoreQueryBuilder.extractInnerHitBuilders(innerHitBuilders);
        assertThat(innerHitBuilders.get(leafInnerHits1.getName()), Matchers.notNullValue());
        assertThat(innerHitBuilders.get(leafInnerHits2.getName()), Matchers.notNullValue());
    }

    /** Extraction through a function_score wrapper. */
    public void testInlineLeafInnerHitsNestedQueryViaFunctionScoreQuery() {
        InnerHitBuilder leafInnerHits = randomInnerHits();
        NestedQueryBuilder nestedQueryBuilder = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None)
                .innerHit(leafInnerHits);
        FunctionScoreQueryBuilder functionScoreQueryBuilder = new FunctionScoreQueryBuilder(nestedQueryBuilder);
        Map<String, InnerHitContextBuilder> innerHitBuilders = new HashMap<>();
        ((AbstractQueryBuilder<?>) functionScoreQueryBuilder).extractInnerHitBuilders(innerHitBuilders);
        assertThat(innerHitBuilders.get(leafInnerHits.getName()), Matchers.notNullValue());
    }

    /**
     * Building inner hits for an unmapped path throws unless the inner-hit
     * builder itself sets ignoreUnmapped(true), in which case nothing is built.
     */
    public void testBuildIgnoreUnmappedNestQuery() throws Exception {
        QueryShardContext queryShardContext = mock(QueryShardContext.class);
        // Simulate an unmapped "path" object.
        when(queryShardContext.getObjectMapper("path")).thenReturn(null);
        SearchContext searchContext = mock(SearchContext.class);
        when(searchContext.getQueryShardContext()).thenReturn(queryShardContext);
        InnerHitBuilder leafInnerHits = randomInnerHits();
        NestedQueryBuilder query1 = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None);
        query1.innerHit(leafInnerHits);
        final Map<String, InnerHitContextBuilder> innerHitBuilders = new HashMap<>();
        final InnerHitsContext innerHitsContext = new InnerHitsContext();
        expectThrows(IllegalStateException.class, () -> {
            query1.extractInnerHitBuilders(innerHitBuilders);
            assertThat(innerHitBuilders.size(), Matchers.equalTo(1));
            assertTrue(innerHitBuilders.containsKey(leafInnerHits.getName()));
            innerHitBuilders.get(leafInnerHits.getName()).build(searchContext, innerHitsContext);
        });
        innerHitBuilders.clear();
        NestedQueryBuilder query2 = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None);
        query2.innerHit(leafInnerHits.setIgnoreUnmapped(true));
        query2.extractInnerHitBuilders(innerHitBuilders);
        assertThat(innerHitBuilders.size(), Matchers.equalTo(1));
        assertTrue(innerHitBuilders.containsKey(leafInnerHits.getName()));
        assertThat(innerHitBuilders.get(leafInnerHits.getName()), instanceOf(NestedQueryBuilder.NestedInnerHitContextBuilder.class));
        NestedQueryBuilder.NestedInnerHitContextBuilder nestedContextBuilder =
                (NestedQueryBuilder.NestedInnerHitContextBuilder) innerHitBuilders.get(leafInnerHits.getName());
        nestedContextBuilder.build(searchContext, innerHitsContext);
        // Unmapped + ignoreUnmapped=true: no inner-hit context is registered.
        assertThat(innerHitsContext.getInnerHits().size(), Matchers.equalTo(0));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.query.ast;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Sets.newHashSet;
import static com.google.common.collect.Sets.newLinkedHashSet;
import static org.apache.jackrabbit.oak.query.ast.AstElementFactory.copyElementAndCheckReference;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import org.apache.jackrabbit.oak.query.QueryEngineSettings;
import org.apache.jackrabbit.oak.query.fulltext.FullTextAnd;
import org.apache.jackrabbit.oak.query.fulltext.FullTextExpression;
import org.apache.jackrabbit.oak.query.index.FilterImpl;
import com.google.common.collect.Sets;
/**
 * An AND condition: the logical conjunction of a non-empty list of
 * constraints. Besides plain evaluation, this class implements the
 * AND-specific query rewrites: simplification (flattening / deduplication),
 * NOT distribution (De Morgan), restriction push-down, and the conversion
 * of "AND of OR" conditions into a UNION of simpler queries.
 */
public class AndImpl extends ConstraintImpl {

    // The conjuncts; guaranteed non-empty by the constructor.
    private final List<ConstraintImpl> constraints;

    public AndImpl(List<ConstraintImpl> constraints) {
        checkArgument(!constraints.isEmpty());
        this.constraints = constraints;
    }

    public AndImpl(ConstraintImpl constraint1, ConstraintImpl constraint2) {
        this(Arrays.asList(constraint1, constraint2));
    }

    public List<ConstraintImpl> getConstraints() {
        return constraints;
    }

    /**
     * Simplify this condition: simplifies each conjunct, flattens nested
     * ANDs, and removes duplicate conjuncts. Returns the single remaining
     * constraint if only one is left, a new AndImpl if anything changed,
     * or {@code this} unchanged otherwise.
     */
    @Override
    public ConstraintImpl simplify() {
        // Use LinkedHashSet to eliminate duplicate constraints while keeping
        // the ordering for test cases (and clients?) that depend on it
        LinkedHashSet<ConstraintImpl> simplified = newLinkedHashSet();
        boolean changed = false; // keep track of changes in simplification
        for (ConstraintImpl constraint : constraints) {
            ConstraintImpl simple = constraint.simplify();
            if (simple instanceof AndImpl) {
                // unwind nested AND constraints
                simplified.addAll(((AndImpl) simple).constraints);
                changed = true;
            } else if (simplified.add(simple)) {
                // check if this constraint got simplified
                changed = changed || simple != constraint;
            } else {
                // this constraint was a duplicate of a previous one
                changed = true;
            }
        }
        if (simplified.size() == 1) {
            return simplified.iterator().next();
        } else if (changed) {
            return new AndImpl(newArrayList(simplified));
        } else {
            return this;
        }
    }

    @Override
    ConstraintImpl not() {
        // not (X and Y) == (not X) or (not Y)
        List<ConstraintImpl> list = newArrayList();
        for (ConstraintImpl constraint : constraints) {
            list.add(new NotImpl(constraint));
        }
        return new OrImpl(list).simplify();
    }

    /** Union of the property-existence conditions of all conjuncts. */
    @Override
    public Set<PropertyExistenceImpl> getPropertyExistenceConditions() {
        Set<PropertyExistenceImpl> result = newHashSet();
        for (ConstraintImpl constraint : constraints) {
            result.addAll(constraint.getPropertyExistenceConditions());
        }
        return result;
    }

    /**
     * Combine the full-text constraints of the conjuncts for the given
     * selector: {@code null} if no conjunct has one, the single expression
     * if exactly one has, otherwise a FullTextAnd of all of them.
     */
    @Override
    public FullTextExpression getFullTextConstraint(SelectorImpl s) {
        List<FullTextExpression> list = newArrayList();
        for (ConstraintImpl constraint : constraints) {
            FullTextExpression expression = constraint.getFullTextConstraint(s);
            if (expression != null) {
                list.add(expression);
            }
        }
        switch (list.size()) {
        case 0:
            return null;
        case 1:
            return list.iterator().next();
        default:
            return new FullTextAnd(list);
        }
    }

    /** All selectors referenced by any conjunct. */
    @Override
    public Set<SelectorImpl> getSelectors() {
        Set<SelectorImpl> result = newHashSet();
        for (ConstraintImpl constraint : constraints) {
            result.addAll(constraint.getSelectors());
        }
        return result;
    }

    /** True only if every conjunct evaluates to true (short-circuits). */
    @Override
    public boolean evaluate() {
        for (ConstraintImpl constraint : constraints) {
            if (!constraint.evaluate()) {
                return false;
            }
        }
        return true;
    }

    @Override
    public boolean evaluateStop() {
        // the logic is reversed here:
        // if one of the conditions is to stop, then we stop
        for (ConstraintImpl constraint : constraints) {
            if (constraint.evaluateStop()) {
                return true;
            }
        }
        return false;
    }

    @Override
    boolean accept(AstVisitor v) {
        return v.visit(this);
    }

    /** Apply every conjunct's restriction to the filter (all must hold). */
    @Override
    public void restrict(FilterImpl f) {
        for (ConstraintImpl constraint : constraints) {
            constraint.restrict(f);
        }
    }

    @Override
    public void restrictPushDown(SelectorImpl s) {
        for (ConstraintImpl constraint : constraints) {
            constraint.restrictPushDown(s);
        }
    }

    //------------------------------------------------------------< Object >--

    @Override
    public String toString() {
        if (constraints.size() == 1) {
            return constraints.iterator().next().toString();
        } else {
            StringBuilder builder = new StringBuilder();
            for (ConstraintImpl constraint : constraints) {
                if (builder.length() > 0) {
                    builder.append(" and ");
                }
                // protect(..) parenthesizes the conjunct where needed
                builder.append(protect(constraint));
            }
            return builder.toString();
        }
    }

    @Override
    public boolean equals(Object that) {
        if (this == that) {
            return true;
        } else if (that instanceof AndImpl) {
            // equality is order-sensitive, since the conjuncts are a List
            return constraints.equals(((AndImpl) that).constraints);
        } else {
            return false;
        }
    }

    @Override
    public int hashCode() {
        return constraints.hashCode();
    }

    /** Deep copy: clones each conjunct (with reference check) into a new AndImpl. */
    @Override
    public AstElement copyOf() {
        List<ConstraintImpl> clone = new ArrayList<ConstraintImpl>(constraints.size());
        for (ConstraintImpl c : constraints) {
            clone.add((ConstraintImpl) copyElementAndCheckReference(c));
        }
        return new AndImpl(clone);
    }

    /**
     * Distribute this AND over a trailing OR, adding the resulting AND
     * branches (recursively expanded) to the given union list. If no OR
     * can be pulled to the right-most position, this condition itself is
     * added unchanged.
     */
    public void addToUnionList(Set<ConstraintImpl> target) {
        // conditions of type
        // @a = 1 and (@x = 1 or @y = 2)
        // are automatically converted to
        // (@a = 1 and @x = 1) union (@a = 1 and @y = 2)
        AndImpl and = pullOrRight();
        ConstraintImpl last = and.getLastConstraint();
        if (last instanceof OrImpl) {
            OrImpl or = (OrImpl) last;
            // same as above, but with the added "and"
            for(ConstraintImpl c : or.getConstraints()) {
                ArrayList<ConstraintImpl> list = and.getFirstConstraints();
                list.add(c);
                new AndImpl(list).addToUnionList(target);
            }
            return;
        }
        target.add(this);
    }

    // All conjuncts except the last one, as a fresh (mutable) list.
    private ArrayList<ConstraintImpl> getFirstConstraints() {
        ArrayList<ConstraintImpl> list = new ArrayList<ConstraintImpl>(constraints.size() - 1);
        list.addAll(constraints.subList(0, constraints.size() - 1));
        return list;
    }

    private ConstraintImpl getLastConstraint() {
        return constraints.get(constraints.size() - 1);
    }

    /**
     * Reorder the conjuncts so that an OR condition (if any) becomes the
     * last entry; nested ANDs are flattened in the process. Returns
     * {@code this} when the last conjunct is already an OR, or when there
     * is no OR among the flattened conjuncts.
     */
    public AndImpl pullOrRight() {
        if (getLastConstraint() instanceof OrImpl) {
            return this;
        }
        ArrayList<ConstraintImpl> andList = getAllAndConditions();
        for (int i = 0; i < andList.size() - 1; i++) {
            ConstraintImpl c = andList.get(i);
            if (c instanceof OrImpl) {
                ArrayList<ConstraintImpl> list = new ArrayList<ConstraintImpl>();
                list.addAll(andList);
                // move the OR condition to the end of the list
                list.remove(i);
                list.add(c);
                return new AndImpl(list);
            }
        }
        return this;
    }

    // Flatten nested AND conditions into a single list of conjuncts.
    private ArrayList<ConstraintImpl> getAllAndConditions() {
        ArrayList<ConstraintImpl> list = new ArrayList<ConstraintImpl>();
        for(ConstraintImpl c : constraints) {
            if (c instanceof AndImpl) {
                list.addAll(((AndImpl) c).getAllAndConditions());
            } else {
                list.add(c);
            }
        }
        return list;
    }

    /**
     * Convert this AND into a set of constraints whose OR is equivalent to
     * it (the branches of a UNION query). An empty set means "cannot or
     * need not be converted" — the caller keeps the original condition.
     */
    @Override
    public Set<ConstraintImpl> convertToUnion() {
        // use linked hash sets where needed, so that the order of queries
        // within the UNION is always the same (independent of the JVM
        // implementation)
        Set<ConstraintImpl> union = Sets.newLinkedHashSet();
        Set<ConstraintImpl> result = Sets.newLinkedHashSet();
        Set<ConstraintImpl> nonUnion = Sets.newHashSet();
        for (ConstraintImpl c : constraints) {
            Set<ConstraintImpl> converted = c.convertToUnion();
            if (converted.isEmpty()) {
                nonUnion.add(c);
            } else {
                union.addAll(converted);
            }
        }
        if (!union.isEmpty() && nonUnion.size() == 1) {
            // this is the simplest case where, for example, out of the two AND operands at least
            // one is a non-union. For example WHERE (a OR b OR c) AND d
            ConstraintImpl right = nonUnion.iterator().next();
            for (ConstraintImpl c : union) {
                result.add(new AndImpl(c, right));
            }
        } else {
            // This could happen when for
            // example: WHERE (a OR b) AND (c OR d).
            // This can be translated into a AND c, a AND d, b AND c, b AND d.
            if (QueryEngineSettings.SQL2_OPTIMIZATION_2) {
                Set<ConstraintImpl> set = Sets.newLinkedHashSet();
                addToUnionList(set);
                if (set.size() == 1) {
                    // not a union: same condition as before
                    return Collections.emptySet();
                }
                return set;
            }
        }
        return result;
    }

    /** True if any conjunct requires a full-text index. */
    @Override
    public boolean requiresFullTextIndex() {
        for (ConstraintImpl c : constraints) {
            if (c.requiresFullTextIndex()) {
                return true;
            }
        }
        return false;
    }

    /** True if any conjunct contains an unfiltered full-text condition. */
    @Override
    public boolean containsUnfilteredFullTextCondition() {
        for (ConstraintImpl c : constraints) {
            if (c.containsUnfilteredFullTextCondition()) {
                return true;
            }
        }
        return false;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.tools;
import java.io.File;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.UnknownHostException;
import java.util.*;
import com.google.common.base.Joiner;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import org.apache.commons.cli.*;
import org.apache.cassandra.auth.PasswordAuthenticator;
import org.apache.cassandra.config.*;
import org.apache.cassandra.db.SystemKeyspace;
import org.apache.cassandra.db.marshal.UTF8Type;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.sstable.SSTableLoader;
import org.apache.cassandra.schema.LegacySchemaTables;
import org.apache.cassandra.streaming.*;
import org.apache.cassandra.thrift.*;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.JVMStabilityInspector;
import org.apache.cassandra.utils.OutputHandler;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TTransport;
/**
 * Command-line tool ("sstableloader") that bulk loads the sstables found in
 * a directory into a live cluster by streaming them to the relevant
 * endpoints. Ring and schema information is fetched over thrift from one of
 * the initial hosts given with -d.
 */
public class BulkLoader
{
    private static final String TOOL_NAME = "sstableloader";

    // command line option long names
    private static final String VERBOSE_OPTION = "verbose";
    private static final String HELP_OPTION = "help";
    private static final String NOPROGRESS_OPTION = "no-progress";
    private static final String IGNORE_NODES_OPTION = "ignore";
    private static final String INITIAL_HOST_ADDRESS_OPTION = "nodes";
    private static final String RPC_PORT_OPTION = "port";
    private static final String USER_OPTION = "username";
    private static final String PASSWD_OPTION = "password";
    private static final String THROTTLE_MBITS = "throttle";
    private static final String TRANSPORT_FACTORY = "transport-factory";
    /* client encryption options */
    private static final String SSL_TRUSTSTORE = "truststore";
    private static final String SSL_TRUSTSTORE_PW = "truststore-password";
    private static final String SSL_KEYSTORE = "keystore";
    private static final String SSL_KEYSTORE_PW = "keystore-password";
    private static final String SSL_PROTOCOL = "ssl-protocol";
    private static final String SSL_ALGORITHM = "ssl-alg";
    private static final String SSL_STORE_TYPE = "store-type";
    private static final String SSL_CIPHER_SUITES = "ssl-ciphers";
    private static final String CONNECTIONS_PER_HOST = "connections-per-host";
    private static final String CONFIG_PATH = "conf-path";

    /**
     * Entry point: parses the command line, streams the sstables, prints a
     * progress indicator (unless --no-progress) and exits with 0 on
     * success, 1 on any failure.
     */
    public static void main(String args[])
    {
        LoaderOptions options = LoaderOptions.parseArgs(args);
        OutputHandler handler = new OutputHandler.SystemOutput(options.verbose, options.debug);
        SSTableLoader loader = new SSTableLoader(
                options.directory,
                new ExternalClient(
                        options.hosts,
                        options.rpcPort,
                        options.user,
                        options.passwd,
                        options.transportFactory,
                        options.storagePort,
                        options.sslStoragePort,
                        options.serverEncOptions),
                handler,
                options.connectionsPerHost);
        // 0 means unlimited throughput
        DatabaseDescriptor.setStreamThroughputOutboundMegabitsPerSec(options.throttle);
        StreamResultFuture future = null;

        ProgressIndicator indicator = new ProgressIndicator();
        try
        {
            if (options.noProgress)
            {
                future = loader.stream(options.ignores);
            }
            else
            {
                // register the indicator as a stream event handler
                future = loader.stream(options.ignores, indicator);
            }

        }
        catch (Exception e)
        {
            JVMStabilityInspector.inspectThrowable(e);
            System.err.println(e.getMessage());
            if (e.getCause() != null)
                System.err.println(e.getCause());
            e.printStackTrace(System.err);
            System.exit(1);
        }

        try
        {
            // block until all streaming sessions are complete
            future.get();

            if (!options.noProgress)
                indicator.printSummary(options.connectionsPerHost);

            // Give sockets time to gracefully close
            Thread.sleep(1000);
            System.exit(0); // We need that to stop non daemonized threads
        }
        catch (Exception e)
        {
            System.err.println("Streaming to the following hosts failed:");
            System.err.println(loader.getFailedHosts());
            e.printStackTrace(System.err);
            System.exit(1);
        }
    }

    /**
     * Stream event handler that renders a single-line progress display
     * (per-host per-session percentages, transfer rates) on stderr and can
     * print a final summary.
     */
    static class ProgressIndicator implements StreamEventHandler
    {
        private long start;          // start timestamp (nanoTime)
        private long lastProgress;   // total bytes reported at the previous event
        private long lastTime;       // timestamp of the previous event (nanoTime)

        private int peak = 0;        // highest observed average rate (MB/s)
        private int totalFiles = 0;  // total number of files to send, across all sessions

        // all known sessions, grouped by peer address
        private final Multimap<InetAddress, SessionInfo> sessionsByHost = HashMultimap.create();

        public ProgressIndicator()
        {
            start = lastTime = System.nanoTime();
        }

        public void onSuccess(StreamState finalState) {}
        public void onFailure(Throwable t) {}

        /** Recomputes and reprints the progress line for relevant stream events. */
        public synchronized void handleStreamEvent(StreamEvent event)
        {
            if (event.eventType == StreamEvent.Type.STREAM_PREPARED)
            {
                SessionInfo session = ((StreamEvent.SessionPreparedEvent) event).session;
                sessionsByHost.put(session.peer, session);
            }
            else if (event.eventType == StreamEvent.Type.FILE_PROGRESS || event.eventType == StreamEvent.Type.STREAM_COMPLETE)
            {
                ProgressInfo progressInfo = null;
                if (event.eventType == StreamEvent.Type.FILE_PROGRESS)
                {
                    progressInfo = ((StreamEvent.ProgressEvent) event).progress;
                }

                long time = System.nanoTime();
                long deltaTime = time - lastTime;

                StringBuilder sb = new StringBuilder();
                sb.append("\rprogress: "); // \r rewrites the same terminal line

                long totalProgress = 0;
                long totalSize = 0;

                // only count total files the first time sessions are seen
                boolean updateTotalFiles = totalFiles == 0;
                // recalculate progress across all sessions in all hosts and display
                for (InetAddress peer : sessionsByHost.keySet())
                {
                    sb.append("[").append(peer).append("]");

                    for (SessionInfo session : sessionsByHost.get(peer))
                    {
                        long size = session.getTotalSizeToSend();
                        long current = 0;
                        int completed = 0;

                        // fold the new progress event into its session
                        if (progressInfo != null && session.peer.equals(progressInfo.peer) && (session.sessionIndex == progressInfo.sessionIndex))
                        {
                            session.updateProgress(progressInfo);
                        }

                        for (ProgressInfo progress : session.getSendingFiles())
                        {
                            if (progress.isCompleted())
                                completed++;
                            current += progress.currentBytes;
                        }
                        totalProgress += current;

                        totalSize += size;

                        // per-session display: "<index>:<done>/<total> <pct>% "
                        sb.append(session.sessionIndex).append(":");
                        sb.append(completed).append("/").append(session.getTotalFilesToSend());
                        sb.append(" ").append(String.format("%-3d", size == 0 ? 100L : current * 100L / size)).append("% ");

                        if (updateTotalFiles)
                            totalFiles += session.getTotalFilesToSend();
                    }
                }

                lastTime = time;
                long deltaProgress = totalProgress - lastProgress;
                lastProgress = totalProgress;

                // overall percentage plus instantaneous and average rates
                sb.append("total: ").append(totalSize == 0 ? 100L : totalProgress * 100L / totalSize).append("% ");
                sb.append(String.format("%-3d", mbPerSec(deltaProgress, deltaTime))).append("MB/s");
                int average = mbPerSec(totalProgress, (time - start));
                if (average > peak)
                    peak = average;
                sb.append("(avg: ").append(average).append(" MB/s)");

                System.err.print(sb.toString());
            }
        }

        // bytes over a nanosecond interval, converted to whole MB/s
        private int mbPerSec(long bytes, long timeInNano)
        {
            double bytesPerNano = ((double)bytes) / timeInNano;
            return (int)((bytesPerNano * 1000 * 1000 * 1000) / (1024 * 1024));
        }

        /** Prints the final transfer statistics (files, bytes, duration, rates). */
        private void printSummary(int connectionsPerHost)
        {
            long end = System.nanoTime();
            long durationMS = ((end - start) / (1000000));
            int average = mbPerSec(lastProgress, (end - start));
            StringBuilder sb = new StringBuilder();
            sb.append("\nSummary statistics: \n");
            sb.append(String.format("   %-30s: %-10d%n", "Connections per host: ", connectionsPerHost));
            sb.append(String.format("   %-30s: %-10d%n", "Total files transferred: ", totalFiles));
            sb.append(String.format("   %-30s: %-10d%n", "Total bytes transferred: ", lastProgress));
            sb.append(String.format("   %-30s: %-10d%n", "Total duration (ms): ", durationMS));
            sb.append(String.format("   %-30s: %-10d%n", "Average transfer rate (MB/s): ", + average));
            sb.append(String.format("   %-30s: %-10d%n", "Peak transfer rate (MB/s): ", + peak));
            System.err.println(sb.toString());
        }
    }

    /**
     * SSTableLoader client that discovers the ring topology and table
     * schemas by querying one of the given hosts over thrift.
     */
    static class ExternalClient extends SSTableLoader.Client
    {
        // table name -> metadata, filled in init(); see getCFMetaData
        private final Map<String, CFMetaData> knownCfs = new HashMap<>();
        private final Set<InetAddress> hosts;
        private final int rpcPort;
        private final String user;
        private final String passwd;
        private final ITransportFactory transportFactory;
        private final int storagePort;
        private final int sslStoragePort;
        private final EncryptionOptions.ServerEncryptionOptions serverEncOptions;

        public ExternalClient(Set<InetAddress> hosts,
                              int port,
                              String user,
                              String passwd,
                              ITransportFactory transportFactory,
                              int storagePort,
                              int sslStoragePort,
                              EncryptionOptions.ServerEncryptionOptions serverEncryptionOptions)
        {
            super();
            this.hosts = hosts;
            this.rpcPort = port;
            this.user = user;
            this.passwd = passwd;
            this.transportFactory = transportFactory;
            this.storagePort = storagePort;
            this.sslStoragePort = sslStoragePort;
            this.serverEncOptions = serverEncryptionOptions;
        }

        /**
         * Tries each host in turn until one answers; fetches the
         * partitioner, the token ring for the keyspace, and the schema of
         * each table. Throws a RuntimeException if every host fails.
         */
        @Override
        public void init(String keyspace)
        {
            Iterator<InetAddress> hostiter = hosts.iterator();
            while (hostiter.hasNext())
            {
                try
                {
                    // Query endpoint to ranges map and schemas from thrift
                    InetAddress host = hostiter.next();
                    Cassandra.Client client = createThriftClient(host.getHostAddress(), rpcPort, this.user, this.passwd, this.transportFactory);
                    setPartitioner(client.describe_partitioner());
                    Token.TokenFactory tkFactory = getPartitioner().getTokenFactory();

                    // record which endpoints own which token ranges
                    for (TokenRange tr : client.describe_ring(keyspace))
                    {
                        Range<Token> range = new Range<>(tkFactory.fromString(tr.start_token), tkFactory.fromString(tr.end_token));
                        for (String ep : tr.endpoints)
                        {
                            addRangeForEndpoint(range, InetAddress.getByName(ep));
                        }
                    }

                    // read table + column definitions from the legacy schema tables
                    String cfQuery = String.format("SELECT * FROM %s.%s WHERE keyspace_name = '%s'",
                                                   SystemKeyspace.NAME,
                                                   LegacySchemaTables.COLUMNFAMILIES,
                                                   keyspace);
                    CqlResult cfRes = client.execute_cql3_query(ByteBufferUtil.bytes(cfQuery), Compression.NONE, ConsistencyLevel.ONE);

                    for (CqlRow row : cfRes.rows)
                    {
                        String columnFamily = UTF8Type.instance.getString(row.columns.get(1).bufferForName());
                        String columnsQuery = String.format("SELECT * FROM %s.%s WHERE keyspace_name = '%s' AND columnfamily_name = '%s'",
                                                            SystemKeyspace.NAME,
                                                            LegacySchemaTables.COLUMNS,
                                                            keyspace,
                                                            columnFamily);
                        CqlResult columnsRes = client.execute_cql3_query(ByteBufferUtil.bytes(columnsQuery), Compression.NONE, ConsistencyLevel.ONE);

                        CFMetaData metadata = ThriftConversion.fromThriftCqlRow(row, columnsRes);
                        knownCfs.put(metadata.cfName, metadata);
                    }
                    // one host answered successfully; we are done
                    break;
                }
                catch (Exception e)
                {
                    // try the next host; give up only when none are left
                    if (!hostiter.hasNext())
                        throw new RuntimeException("Could not retrieve endpoint ranges: ", e);
                }
            }
        }

        @Override
        public StreamConnectionFactory getConnectionFactory()
        {
            return new BulkLoadConnectionFactory(storagePort, sslStoragePort, serverEncOptions, false);
        }

        /** Returns the metadata discovered by init(); keyed by table name only. */
        @Override
        public CFMetaData getCFMetaData(String keyspace, String cfName)
        {
            return knownCfs.get(cfName);
        }

        // Opens a thrift connection and logs in when credentials are given.
        private static Cassandra.Client createThriftClient(String host, int port, String user, String passwd, ITransportFactory transportFactory) throws Exception
        {
            TTransport trans = transportFactory.openTransport(host, port);
            TProtocol protocol = new TBinaryProtocol(trans);
            Cassandra.Client client = new Cassandra.Client(protocol);
            if (user != null && passwd != null)
            {
                Map<String, String> credentials = new HashMap<>();
                credentials.put(PasswordAuthenticator.USERNAME_KEY, user);
                credentials.put(PasswordAuthenticator.PASSWORD_KEY, passwd);
                AuthenticationRequest authenticationRequest = new AuthenticationRequest(credentials);
                client.login(authenticationRequest);
            }
            return client;
        }
    }

    /**
     * Parsed command-line options of the tool. Values are first taken from
     * an optional cassandra.yaml (-f), then overridden by explicit command
     * line options.
     */
    static class LoaderOptions
    {
        public final File directory;   // the sstable directory to load

        public boolean debug;
        public boolean verbose;
        public boolean noProgress;
        public int rpcPort = 9160;
        public String user;
        public String passwd;
        public int throttle = 0;       // 0 = unlimited
        public int storagePort;
        public int sslStoragePort;
        public ITransportFactory transportFactory = new TFramedTransportFactory();
        public EncryptionOptions encOptions = new EncryptionOptions.ClientEncryptionOptions();
        public int connectionsPerHost = 1;
        public EncryptionOptions.ServerEncryptionOptions serverEncOptions = new EncryptionOptions.ServerEncryptionOptions();

        public final Set<InetAddress> hosts = new HashSet<>();    // initial contact points (-d)
        public final Set<InetAddress> ignores = new HashSet<>();  // endpoints excluded from streaming (-i)

        LoaderOptions(File directory)
        {
            this.directory = directory;
        }

        /**
         * Parses the command line; prints usage and exits the JVM on any
         * error, so the returned value is always fully populated.
         */
        public static LoaderOptions parseArgs(String cmdArgs[])
        {
            CommandLineParser parser = new GnuParser();
            CmdLineOptions options = getCmdLineOptions();
            try
            {
                CommandLine cmd = parser.parse(options, cmdArgs, false);

                if (cmd.hasOption(HELP_OPTION))
                {
                    printUsage(options);
                    System.exit(0);
                }

                // exactly one positional argument: the sstable directory
                String[] args = cmd.getArgs();
                if (args.length == 0)
                {
                    System.err.println("Missing sstable directory argument");
                    printUsage(options);
                    System.exit(1);
                }

                if (args.length > 1)
                {
                    System.err.println("Too many arguments");
                    printUsage(options);
                    System.exit(1);
                }

                String dirname = args[0];
                File dir = new File(dirname);

                if (!dir.exists())
                    errorMsg("Unknown directory: " + dirname, options);

                if (!dir.isDirectory())
                    errorMsg(dirname + " is not a directory", options);

                LoaderOptions opts = new LoaderOptions(dir);

                opts.verbose = cmd.hasOption(VERBOSE_OPTION);
                opts.noProgress = cmd.hasOption(NOPROGRESS_OPTION);

                if (cmd.hasOption(RPC_PORT_OPTION))
                    opts.rpcPort = Integer.parseInt(cmd.getOptionValue(RPC_PORT_OPTION));

                if (cmd.hasOption(USER_OPTION))
                    opts.user = cmd.getOptionValue(USER_OPTION);

                if (cmd.hasOption(PASSWD_OPTION))
                    opts.passwd = cmd.getOptionValue(PASSWD_OPTION);

                if (cmd.hasOption(INITIAL_HOST_ADDRESS_OPTION))
                {
                    String[] nodes = cmd.getOptionValue(INITIAL_HOST_ADDRESS_OPTION).split(",");
                    try
                    {
                        for (String node : nodes)
                        {
                            opts.hosts.add(InetAddress.getByName(node.trim()));
                        }
                    }
                    catch (UnknownHostException e)
                    {
                        errorMsg("Unknown host: " + e.getMessage(), options);
                    }

                }
                else
                {
                    System.err.println("Initial hosts must be specified (-d)");
                    printUsage(options);
                    System.exit(1);
                }

                if (cmd.hasOption(IGNORE_NODES_OPTION))
                {
                    String[] nodes = cmd.getOptionValue(IGNORE_NODES_OPTION).split(",");
                    try
                    {
                        for (String node : nodes)
                        {
                            opts.ignores.add(InetAddress.getByName(node.trim()));
                        }
                    }
                    catch (UnknownHostException e)
                    {
                        errorMsg("Unknown host: " + e.getMessage(), options);
                    }
                }

                if (cmd.hasOption(CONNECTIONS_PER_HOST))
                    opts.connectionsPerHost = Integer.parseInt(cmd.getOptionValue(CONNECTIONS_PER_HOST));

                // try to load config file first, so that values can be rewritten with other option values.
                // otherwise use default config.
                Config config;
                if (cmd.hasOption(CONFIG_PATH))
                {
                    File configFile = new File(cmd.getOptionValue(CONFIG_PATH));
                    if (!configFile.exists())
                    {
                        errorMsg("Config file not found", options);
                    }
                    config = new YamlConfigurationLoader().loadConfig(configFile.toURI().toURL());
                }
                else
                {
                    config = new Config();
                }
                opts.storagePort = config.storage_port;
                opts.sslStoragePort = config.ssl_storage_port;
                opts.throttle = config.stream_throughput_outbound_megabits_per_sec;
                opts.encOptions = config.client_encryption_options;
                opts.serverEncOptions = config.server_encryption_options;

                // command-line options below override values read from yaml
                if (cmd.hasOption(THROTTLE_MBITS))
                {
                    opts.throttle = Integer.parseInt(cmd.getOptionValue(THROTTLE_MBITS));
                }

                if (cmd.hasOption(SSL_TRUSTSTORE))
                {
                    opts.encOptions.truststore = cmd.getOptionValue(SSL_TRUSTSTORE);
                }

                if (cmd.hasOption(SSL_TRUSTSTORE_PW))
                {
                    opts.encOptions.truststore_password = cmd.getOptionValue(SSL_TRUSTSTORE_PW);
                }

                if (cmd.hasOption(SSL_KEYSTORE))
                {
                    opts.encOptions.keystore = cmd.getOptionValue(SSL_KEYSTORE);
                    // if a keystore was provided, lets assume we'll need to use it
                    opts.encOptions.require_client_auth = true;
                }

                if (cmd.hasOption(SSL_KEYSTORE_PW))
                {
                    opts.encOptions.keystore_password = cmd.getOptionValue(SSL_KEYSTORE_PW);
                }

                if (cmd.hasOption(SSL_PROTOCOL))
                {
                    opts.encOptions.protocol = cmd.getOptionValue(SSL_PROTOCOL);
                }

                if (cmd.hasOption(SSL_ALGORITHM))
                {
                    opts.encOptions.algorithm = cmd.getOptionValue(SSL_ALGORITHM);
                }

                if (cmd.hasOption(SSL_STORE_TYPE))
                {
                    opts.encOptions.store_type = cmd.getOptionValue(SSL_STORE_TYPE);
                }

                if (cmd.hasOption(SSL_CIPHER_SUITES))
                {
                    opts.encOptions.cipher_suites = cmd.getOptionValue(SSL_CIPHER_SUITES).split(",");
                }

                if (cmd.hasOption(TRANSPORT_FACTORY))
                {
                    ITransportFactory transportFactory = getTransportFactory(cmd.getOptionValue(TRANSPORT_FACTORY));
                    configureTransportFactory(transportFactory, opts);
                    opts.transportFactory = transportFactory;
                }

                return opts;
            }
            catch (ParseException | ConfigurationException | MalformedURLException e)
            {
                errorMsg(e.getMessage(), options);
                return null;
            }
        }

        // Instantiates an ITransportFactory implementation by class name.
        private static ITransportFactory getTransportFactory(String transportFactory)
        {
            try
            {
                Class<?> factory = Class.forName(transportFactory);
                if (!ITransportFactory.class.isAssignableFrom(factory))
                    throw new IllegalArgumentException(String.format("transport factory '%s' " +
                            "not derived from ITransportFactory", transportFactory));
                return (ITransportFactory) factory.newInstance();
            }
            catch (Exception e)
            {
                throw new IllegalArgumentException(String.format("Cannot create a transport factory '%s'.", transportFactory), e);
            }
        }

        // Forwards SSL settings (and matching system properties) to the factory.
        private static void configureTransportFactory(ITransportFactory transportFactory, LoaderOptions opts)
        {
            Map<String, String> options = new HashMap<>();
            // If the supplied factory supports the same set of options as our SSL impl, set those
            if (transportFactory.supportedOptions().contains(SSLTransportFactory.TRUSTSTORE))
                options.put(SSLTransportFactory.TRUSTSTORE, opts.encOptions.truststore);
            if (transportFactory.supportedOptions().contains(SSLTransportFactory.TRUSTSTORE_PASSWORD))
                options.put(SSLTransportFactory.TRUSTSTORE_PASSWORD, opts.encOptions.truststore_password);
            if (transportFactory.supportedOptions().contains(SSLTransportFactory.PROTOCOL))
                options.put(SSLTransportFactory.PROTOCOL, opts.encOptions.protocol);
            if (transportFactory.supportedOptions().contains(SSLTransportFactory.CIPHER_SUITES))
                options.put(SSLTransportFactory.CIPHER_SUITES, Joiner.on(',').join(opts.encOptions.cipher_suites));

            if (transportFactory.supportedOptions().contains(SSLTransportFactory.KEYSTORE)
                    && opts.encOptions.require_client_auth)
                options.put(SSLTransportFactory.KEYSTORE, opts.encOptions.keystore);
            if (transportFactory.supportedOptions().contains(SSLTransportFactory.KEYSTORE_PASSWORD)
                    && opts.encOptions.require_client_auth)
                options.put(SSLTransportFactory.KEYSTORE_PASSWORD, opts.encOptions.keystore_password);

            // Now check if any of the factory's supported options are set as system properties
            for (String optionKey : transportFactory.supportedOptions())
                if (System.getProperty(optionKey) != null)
                    options.put(optionKey, System.getProperty(optionKey));

            transportFactory.setOptions(options);
        }

        // Prints the message plus usage, then terminates the JVM.
        private static void errorMsg(String msg, CmdLineOptions options)
        {
            System.err.println(msg);
            printUsage(options);
            System.exit(1);
        }

        private static CmdLineOptions getCmdLineOptions()
        {
            CmdLineOptions options = new CmdLineOptions();
            options.addOption("v",  VERBOSE_OPTION,      "verbose output");
            options.addOption("h",  HELP_OPTION,         "display this help message");
            options.addOption(null, NOPROGRESS_OPTION,   "don't display progress");
            options.addOption("i",  IGNORE_NODES_OPTION, "NODES", "don't stream to this (comma separated) list of nodes");
            options.addOption("d",  INITIAL_HOST_ADDRESS_OPTION, "initial hosts", "Required. try to connect to these hosts (comma separated) initially for ring information");
            options.addOption("p",  RPC_PORT_OPTION, "rpc port", "port used for rpc (default 9160)");
            options.addOption("t",  THROTTLE_MBITS, "throttle", "throttle speed in Mbits (default unlimited)");
            options.addOption("u",  USER_OPTION, "username", "username for cassandra authentication");
            options.addOption("pw", PASSWD_OPTION, "password", "password for cassandra authentication");
            options.addOption("tf", TRANSPORT_FACTORY, "transport factory", "Fully-qualified ITransportFactory class name for creating a connection to cassandra");
            options.addOption("cph", CONNECTIONS_PER_HOST, "connectionsPerHost", "number of concurrent connections-per-host.");
            // ssl connection-related options
            options.addOption("ts", SSL_TRUSTSTORE, "TRUSTSTORE", "Client SSL: full path to truststore");
            options.addOption("tspw", SSL_TRUSTSTORE_PW, "TRUSTSTORE-PASSWORD", "Client SSL: password of the truststore");
            options.addOption("ks", SSL_KEYSTORE, "KEYSTORE", "Client SSL: full path to keystore");
            options.addOption("kspw", SSL_KEYSTORE_PW, "KEYSTORE-PASSWORD", "Client SSL: password of the keystore");
            options.addOption("prtcl", SSL_PROTOCOL, "PROTOCOL", "Client SSL: connections protocol to use (default: TLS)");
            options.addOption("alg", SSL_ALGORITHM, "ALGORITHM", "Client SSL: algorithm (default: SunX509)");
            options.addOption("st", SSL_STORE_TYPE, "STORE-TYPE", "Client SSL: type of store");
            options.addOption("ciphers", SSL_CIPHER_SUITES, "CIPHER-SUITES", "Client SSL: comma-separated list of encryption suites to use");
            options.addOption("f", CONFIG_PATH, "path to config file", "cassandra.yaml file path for streaming throughput and client/server SSL.");
            return options;
        }

        public static void printUsage(Options options)
        {
            String usage = String.format("%s [options] <dir_path>", TOOL_NAME);
            String header = System.lineSeparator() +
                    "Bulk load the sstables found in the directory <dir_path> to the configured cluster." +
                    "The parent directories of <dir_path> are used as the target keyspace/table name. " +
                    "So for instance, to load an sstable named Standard1-g-1-Data.db into Keyspace1/Standard1, " +
                    "you will need to have the files Standard1-g-1-Data.db and Standard1-g-1-Index.db into a directory /path/to/Keyspace1/Standard1/.";
            String footer = System.lineSeparator() +
                    "You can provide cassandra.yaml file with -f command line option to set up streaming throughput, client and server encryption options. " +
                    "Only stream_throughput_outbound_megabits_per_sec, server_encryption_options and client_encryption_options are read from yaml. " +
                    "You can override options read from cassandra.yaml with corresponding command line options.";
            new HelpFormatter().printHelp(usage, header, options, footer);
        }
    }

    /** commons-cli Options subclass with convenience overloads for adding options. */
    public static class CmdLineOptions extends Options
    {
        /**
         * Add option with argument and argument name
         * @param opt shortcut for option name
         * @param longOpt complete option name
         * @param argName argument name
         * @param description description of the option
         * @return updated Options object
         */
        public Options addOption(String opt, String longOpt, String argName, String description)
        {
            Option option = new Option(opt, longOpt, true, description);
            option.setArgName(argName);

            return addOption(option);
        }

        /**
         * Add option without argument
         * @param opt shortcut for option name
         * @param longOpt complete option name
         * @param description description of the option
         * @return updated Options object
         */
        public Options addOption(String opt, String longOpt, String description)
        {
            return addOption(new Option(opt, longOpt, false, description));
        }
    }
}
| |
package thaumcraft.api.visnet;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.HashMap;
import net.minecraft.util.MovingObjectPosition;
import net.minecraft.util.MovingObjectPosition.MovingObjectType;
import net.minecraft.util.Vec3;
import net.minecraft.world.World;
import thaumcraft.api.ThaumcraftApi;
import thaumcraft.api.ThaumcraftApiHelper;
import thaumcraft.api.WorldCoordinates;
import thaumcraft.api.aspects.Aspect;
/**
 * Static registry and lookup helper for the vis network: tracks vis sources
 * per dimension, resolves which node a given position should drain from
 * (with caching), and performs the actual draining.
 *
 * NOTE(review): all state is held in static maps with no synchronization —
 * presumably only ever touched from the server thread; confirm before
 * calling from elsewhere.
 */
public class VisNetHandler {

	// NODE DRAINING

	/**
	 * This method drains vis from a relay or source near the passed in
	 * location. The amount received can be less than the amount requested so
	 * take that into account.
	 *
	 * @param world the world containing the drainer and the nodes
	 * @param x the x position of the draining block or entity
	 * @param y the y position of the draining block or entity
	 * @param z the z position of the draining block or entity
	 * @param aspect what aspect to drain
	 * @param amount how much to drain
	 * @return how much was actually drained
	 */
	public static int drainVis(World world, int x, int y, int z, Aspect aspect, int amount) {
		int drainedAmount = 0;
		WorldCoordinates drainer = new WorldCoordinates(x, y, z,
				world.provider.dimensionId);
		// Lazily compute and cache the set of reachable nodes for this position.
		if (!nearbyNodes.containsKey(drainer)) {
			calculateNearbyNodes(world, x, y, z);
		}
		ArrayList<WeakReference<TileVisNode>> nodes = nearbyNodes.get(drainer);
		if (nodes!=null && nodes.size()>0)
		// Note: single-statement if — the loop below is its body.
		for (WeakReference<TileVisNode> noderef : nodes) {
			TileVisNode node = noderef.get();
			if (node == null) continue; // node tile was garbage-collected; skip
			int a = node.consumeVis(aspect, amount);
			drainedAmount += a;
			amount -= a; // remaining demand for the next node
			if (a>0) {
				// Spawn the client visual "vis stream" only when something was drained.
				// color is the primal-aspect index; -1 if aspect is not primal.
				int color = Aspect.getPrimalAspects().indexOf(aspect);
				generateVisEffect(world.provider.dimensionId, x, y, z, node.xCoord, node.yCoord, node.zCoord, color);
			}
			if (amount <= 0) {
				break; // demand fully satisfied
			}
		}
		return drainedAmount;
	}

	/** Delegates to the internal implementation to spawn the vis-stream particle effect. */
	public static void generateVisEffect(int dim, int x, int y, int z, int x2, int y2, int z2, int color) {
		ThaumcraftApi.internalMethods.generateVisEffect(dim, x, y, z, x2, y2, z2, color);
	}

	// Per-dimension registry of root vis sources, keyed by dimension id, then by location.
	public static HashMap<Integer, HashMap<WorldCoordinates, WeakReference<TileVisNode>>> sources = new HashMap<Integer, HashMap<WorldCoordinates, WeakReference<TileVisNode>>>();

	/**
	 * Registers a source node for its dimension and invalidates the
	 * drain-location cache (network topology changed).
	 */
	public static void addSource(World world, TileVisNode vs) {
		HashMap<WorldCoordinates, WeakReference<TileVisNode>> sourcelist = sources
				.get(world.provider.dimensionId);
		if (sourcelist == null) {
			sourcelist = new HashMap<WorldCoordinates, WeakReference<TileVisNode>>();
		}
		sourcelist.put(vs.getLocation(), new WeakReference(vs));
		sources.put(world.provider.dimensionId, sourcelist);
		nearbyNodes.clear();
	}

	/** @return true if the weak reference still points at a live, non-invalidated tile. */
	public static boolean isNodeValid(WeakReference<TileVisNode> node) {
		if (node == null || node.get() == null || node.get().isInvalid())
			return false;
		return true;
	}

	/**
	 * Attaches a (non-source) node to the closest compatible parent in the
	 * network of its dimension.
	 *
	 * @return a weak reference to the chosen parent, or null if no parent
	 *         was found (or no sources exist for this dimension yet)
	 */
	public static WeakReference<TileVisNode> addNode(World world, TileVisNode vn) {
		WeakReference ref = new WeakReference(vn);
		HashMap<WorldCoordinates, WeakReference<TileVisNode>> sourcelist = sources
				.get(world.provider.dimensionId);
		if (sourcelist == null) {
			// NOTE(review): this fresh map is never stored back into 'sources',
			// so it is effectively dead code before the early return — presumably
			// an oversight; confirm intended behavior.
			sourcelist = new HashMap<WorldCoordinates, WeakReference<TileVisNode>>();
			return null;
		}
		// Collect candidate parents as {node, weight} pairs.
		ArrayList<Object[]> nearby = new ArrayList<Object[]>();
		for (WeakReference<TileVisNode> root : sourcelist.values()) {
			if (!isNodeValid(root))
				continue;
			TileVisNode source = root.get();
			float r = inRange(world, vn.getLocation(), source.getLocation(),
					vn.getRange());
			if (r > 0) {
				// Direct connection to a source is preferred: its weight is reduced
				// by range*2 so it beats child nodes at similar distance.
				nearby.add(new Object[] { source, r - vn.getRange() * 2 });
			}
			// Also consider all descendants of this source as potential parents.
			nearby = findClosestNodes(vn, source, nearby);
			cache.clear(); // reset traversal guard between sources
		}
		float dist = Float.MAX_VALUE;
		TileVisNode closest = null;
		if (nearby.size() > 0) {
			for (Object[] o : nearby) {
				// Accept the closest candidate with a compatible attunement
				// (-1 on either side means "any") that has line of sight.
				if ((Float) o[1] < dist &&
						(vn.getAttunement() == -1 || ((TileVisNode) o[0]).getAttunement() == -1 ||
								vn.getAttunement() == ((TileVisNode) o[0]).getAttunement())//) {
						&& canNodeBeSeen(vn,(TileVisNode)o[0])) {
					dist = (Float) o[1];
					closest = (TileVisNode) o[0];
				}
			}
		}
		if (closest != null) {
			closest.getChildren().add(ref);
			nearbyNodes.clear(); // topology changed; invalidate drain cache
			return new WeakReference(closest);
		}
		return null;
	}

	// Guard set for the recursive traversal in findClosestNodes: avoids cycles
	// and caps the walk at 512 visited nodes.
	static ArrayList<WorldCoordinates> cache = new ArrayList<WorldCoordinates>();

	/**
	 * Recursively walks the subtree under 'parent', appending every child in
	 * range of 'target' to 'in' as a {node, squaredDistance} pair.
	 */
	public static ArrayList<Object[]> findClosestNodes(TileVisNode target,
			TileVisNode parent, ArrayList<Object[]> in) {
		if (cache.size() > 512 || cache.contains(new WorldCoordinates(parent))) return in;
		cache.add(new WorldCoordinates(parent));
		for (WeakReference<TileVisNode> childWR : parent.getChildren()) {
			TileVisNode child = childWR.get();
			if (child != null && !child.equals(target) && !child.equals(parent)) {
				float r2 = inRange(child.getWorldObj(), child.getLocation(),
						target.getLocation(), target.getRange());
				if (r2 > 0) {
					in.add(new Object[] { child, r2 });
				}
				in = findClosestNodes(target, child, in);
			}
		}
		return in;
	}

	/**
	 * Despite the name, returns the SQUARED distance between the two
	 * coordinates when it is within 'range' blocks, or -1 when out of range.
	 */
	private static float inRange(World world, WorldCoordinates cc1,
			WorldCoordinates cc2, int range) {
		float distance = cc1.getDistanceSquaredToWorldCoordinates(cc2);
		return distance > range * range ? -1 : distance;
	}

	// Cache of drain position -> closest reachable node per source; cleared
	// whenever the network topology changes.
	private static HashMap<WorldCoordinates, ArrayList<WeakReference<TileVisNode>>> nearbyNodes = new HashMap<WorldCoordinates, ArrayList<WeakReference<TileVisNode>>>();

	/**
	 * For each source in the dimension, finds the single closest in-range node
	 * (the source itself or any of its descendants) to (x, y, z) and caches
	 * the result in nearbyNodes.
	 */
	private static void calculateNearbyNodes(World world, int x, int y, int z) {
		HashMap<WorldCoordinates, WeakReference<TileVisNode>> sourcelist = sources
				.get(world.provider.dimensionId);
		if (sourcelist == null) {
			// NOTE(review): as in addNode, this fresh map is discarded on return
			// and nothing is cached for this position — confirm intended.
			sourcelist = new HashMap<WorldCoordinates, WeakReference<TileVisNode>>();
			return;
		}
		ArrayList<WeakReference<TileVisNode>> cn = new ArrayList<WeakReference<TileVisNode>>();
		WorldCoordinates drainer = new WorldCoordinates(x, y, z,
				world.provider.dimensionId);
		ArrayList<Object[]> nearby = new ArrayList<Object[]>();
		for (WeakReference<TileVisNode> root : sourcelist.values()) {
			if (!isNodeValid(root))
				continue;
			TileVisNode source = root.get();
			TileVisNode closest = null;
			float range = Float.MAX_VALUE;
			float r = inRange(world, drainer, source.getLocation(),
					source.getRange());
			if (r > 0) {
				range = r;
				closest = source;
			}
			// Check every descendant; keep whichever node is nearest in range.
			ArrayList<WeakReference<TileVisNode>> children = new ArrayList<WeakReference<TileVisNode>>();
			children = getAllChildren(source,children);
			for (WeakReference<TileVisNode> child : children) {
				TileVisNode n = child.get();
				if (n != null && !n.equals(root)) {
					float r2 = inRange(n.getWorldObj(), n.getLocation(),
							drainer, n.getRange());
					if (r2 > 0 && r2 < range) {
						range = r2;
						closest = n;
					}
				}
			}
			if (closest != null) {
				cn.add(new WeakReference(closest));
			}
		}
		nearbyNodes.put(drainer, cn);
	}

	/**
	 * Depth-first collection of every live descendant of 'source' into 'list'.
	 * NOTE(review): no cycle guard here (unlike findClosestNodes) — assumes the
	 * child graph is acyclic; confirm.
	 */
	private static ArrayList<WeakReference<TileVisNode>> getAllChildren(TileVisNode source, ArrayList<WeakReference<TileVisNode>> list) {
		for (WeakReference<TileVisNode> child : source.getChildren()) {
			TileVisNode n = child.get();
			if (n != null) {
				list.add(child);
				list = getAllChildren(n,list);
			}
		}
		return list;
	}

	/**
	 * Line-of-sight check between block centers. Returns true when the ray
	 * hits nothing, or when the first block hit is the target itself.
	 */
	public static boolean canNodeBeSeen(TileVisNode source,TileVisNode target)
	{
		MovingObjectPosition mop = ThaumcraftApiHelper.rayTraceIgnoringSource(source.getWorldObj(),
				Vec3.createVectorHelper(source.xCoord+.5, source.yCoord+.5,source.zCoord+.5),
				Vec3.createVectorHelper(target.xCoord+.5, target.yCoord+.5,target.zCoord+.5),
				false, true, false);
		return mop == null || (mop.typeOfHit==MovingObjectType.BLOCK &&
				mop.blockX==target.xCoord && mop.blockY==target.yCoord && mop.blockZ==target.zCoord);
	}

	// public static HashMap<WorldCoordinates,WeakReference<TileVisNode>>
	// noderef = new HashMap<WorldCoordinates,WeakReference<TileVisNode>>();
	//
	// public static TileVisNode getClosestNodeWithinRadius(World world, int x,
	// int y, int z, int radius) {
	// TileVisNode out = null;
	// WorldCoordinates wc = null;
	// float cd = Float.MAX_VALUE;
	// for (int sx = x - radius; sx <= x + radius; sx++) {
	// for (int sy = y - radius; sy <= y + radius; sy++) {
	// for (int sz = z - radius; sz <= z + radius; sz++) {
	// wc = new WorldCoordinates(sx,sy,sz,world.provider.dimensionId);
	// if (noderef.containsKey(wc)) {
	// float d = wc.getDistanceSquared(x, y, z);
	// if (d<radius*radius && noderef.get(wc).get()!=null &&
	// !noderef.get(wc).get().isReceiver() &&
	// isNodeValid(noderef.get(wc).get().getParent())
	// ) {
	// out = noderef.get(wc).get();
	// cd = d;
	// }
	// }
	// }
	// }
	// }
	// return out;
	// }
}
| |
/*
* @(#)ReflectivityRecursiveParrat.java created 24/06/2013 Hermanville sur Mer
*
* Copyright (c) 2013 Luca Lutterotti All Rights Reserved.
*
* This software is the research result of Luca Lutterotti and it is
* provided as it is as confidential and proprietary information.
* You shall not disclose such Confidential Information and shall use
* it only in accordance with the terms of the license agreement you
* entered into with Luca Lutterotti.
*
* THE AUTHOR MAKES NO REPRESENTATIONS OR WARRANTIES ABOUT THE SUITABILITY OF THE
* SOFTWARE, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
* IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
* PURPOSE, OR NON-INFRINGEMENT. THE AUTHOR SHALL NOT BE LIABLE FOR ANY DAMAGES
* SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING OR DISTRIBUTING
* THIS SOFTWARE OR ITS DERIVATIVES.
*
*/
package it.unitn.ing.rista.diffr.reflectivity;
import it.unitn.ing.rista.awt.JOptionsDialog;
import it.unitn.ing.rista.awt.JParameterListPane;
import it.unitn.ing.rista.diffr.*;
import it.unitn.ing.rista.util.*;
import javax.swing.*;
import java.awt.*;
import static java.lang.System.out;
/**
 * The ReflectivityRecursiveParrat class computes X-ray reflectivity using
 * the recursive Parratt method.
 *
 * @author Luca Lutterotti
 * @version $Revision: 1.0 $, $Date: 2013/06/24 22:30:50 $
 * @since JDK1.1
 */
public class ReflectivityRecursiveParrat extends Reflectivity {

  // Identifier strings under which this model is registered and displayed.
  static String idString = "Parrat recursive method";
  static String idDescription = "Use the Parrat recursive method for reflectivity computation";

  // CIF dictionary keys for this object's persistent parameters.
  protected static String[] diclistc = {
      "_maud_reflectivity_scale_factor"
  };
  // Human-readable meaning of each dictionary key (same order as diclistc).
  protected static String[] diclistcrm = {
      "_maud_reflectivity_scale_factor"
  };

  // No subordinate objects for this model.
  protected static String[] classlistc = {};
  protected static String[] classlistcs = {};

  /**
   * Creates the model attached to a parent object with an explicit label.
   */
  public ReflectivityRecursiveParrat(XRDcat aobj, String alabel) {
    super(aobj, alabel);
    initXRD();
    identifier = idString;
    IDlabel = idString;
    description = idDescription;
  }

  /** Creates the model with the default label. */
  public ReflectivityRecursiveParrat(XRDcat aobj) {
    this(aobj, idString);
  }

  /** No-arg constructor used by the framework (e.g. reflection/loading). */
  public ReflectivityRecursiveParrat() {
    identifier = idString;
    IDlabel = idString;
    description = idDescription;
  }

  /**
   * Declares how many strings/parameters/subordinates this object owns:
   * a single parameter (the scale factor), nothing else.
   */
  public void initConstant() {
    Nstring = 0;
    Nstringloop = 0;
    Nparameter = 1;
    Nparameterloop = 0;
    Nsubordinate = 0;
    Nsubordinateloop = 0;
  }

  /** Copies the static dictionary/class tables into the instance arrays. */
  public void initDictionary() {
    for (int i = 0; i < totsubordinateloop; i++)
      diclist[i] = diclistc[i];
    System.arraycopy(diclistcrm, 0, diclistRealMeaning, 0, totsubordinateloop);
    for (int i = 0; i < totsubordinateloop - totsubordinate; i++)
      classlist[i] = classlistc[i];
    for (int i = 0; i < totsubordinate - totparameterloop; i++)
      classlists[i] = classlistcs[i];
  }

  /**
   * Creates the scale-factor parameter (default 1.0) with preference-driven
   * min/max bounds.
   */
  public void initParameters() {
    super.initParameters();
    parameterField[0] = new Parameter(this, getParameterString(0), 1.0,
        ParameterPreferences.getDouble(getParameterString(0) + ".min", 0.1),
        ParameterPreferences.getDouble(getParameterString(0) + ".max", 1E9));
  }

  /**
   * Computes reflectivity for every active data file of the data set,
   * fanning out over persistent threads when threading is enabled and
   * granularity allows it; otherwise runs sequentially.
   */
  public void computeReflectivity(Sample asample, DataFileSet adataset) {
    int datafilenumber = adataset.activedatafilesnumber();

    final Sample theSample = asample;
    final DataFileSet theDataset = adataset;

    final int maxThreads = Math.min(Constants.maxNumberOfThreads, datafilenumber);
    if (maxThreads > 1 && Constants.threadingGranularity >= Constants.MEDIUM_GRANULARITY) {
      if (Constants.debugThreads)
        out.println("Thread datafileset " + getLabel());
      int i;
      PersistentThread[] threads = new PersistentThread[maxThreads];
      // Each thread processes a contiguous range of data-file indices.
      for (i = 0; i < maxThreads; i++) {
        threads[i] = new PersistentThread(i) {
          @Override
          public void executeJob() {
            int i1 = this.getJobNumberStart();
            int i2 = this.getJobNumberEnd();
            for (int j = i1; j < i2; j++) {
              computeReflectivity(theSample, theDataset.getActiveDataFile(j));
            }
          }
        };
      }
      i = 0;
      // Ceiling-style split of datafilenumber across maxThreads
      // (note: datafilenumber / maxThreads is integer division).
      int istep = (int) (0.9999 + datafilenumber / maxThreads);
      for (int j = 0; j < maxThreads; j++) {
        int is = i;
        if (j < maxThreads - 1)
          i = Math.min(i + istep, datafilenumber);
        else
          i = datafilenumber; // last thread takes whatever remains
        threads[j].setJobRange(is, i);
        threads[j].start();
      }
      // Poll until every worker reports completion.
      boolean running;
      do {
        running = false;
        try {
          Thread.sleep(Constants.timeToWaitThreadsEnding);
        } catch (InterruptedException r) {
        }
        for (int h = 0; h < maxThreads; h++) {
          if (!threads[h].isEnded())
            running = true;
        }
      } while (running);

    } else
      for (int k = 0; k < datafilenumber; k++)
        computeReflectivity(theSample, theDataset.getActiveDataFile(k));
  }

  /**
   * Computes the reflectivity curve for a single data file using the Parratt
   * recursion over the sample's layer stack, for every radiation line, and
   * adds the result to the data file's phase fit.
   *
   * Conventions visible in the code below: each complex number is a
   * double[2] = {re, im}; layer index 0 is the incident medium (getlayer(-1)
   * returns null, which is mapped to permittivity 1), and the last index is
   * the substrate (given zero thickness).
   */
  public void computeReflectivity(Sample asample, DiffrDataFile adatafile) {

//    Radiation rad = adatafile.getDataFileSet().getRadiation();
//    double lambda = rad.getWavelength().getValueD();
//    double constant1 = 2.0 * Constants.PI / lambda * 1E8;
//    double energy = Constants.ENERGY_LAMBDA / lambda; // in eV
//    double energyInKeV = energy * 0.001;

    Instrument ainstrument = adatafile.getDataFileSet().getInstrument();
    // s/p polarization weights derived from the instrument geometry.
    double polarization = ainstrument.getGeometry().getPolarizationAmount();
    double polarizationAngle = ainstrument.getGeometry().getPolarizationAngle();
    double cos2polarization = MoreMath.cosd(polarizationAngle);
    cos2polarization *= cos2polarization;
    double s_factor = 0.5 - 0.5 * polarization * (1.0 - cos2polarization);
    double p_factor = 0.5 - 0.5 * polarization * cos2polarization;

    double incidentIntensity = ainstrument.getIntensityValue() * parameterValues[0];// * 1.0E8;

    double reflectivity;
//    double[] acr;
    double[] cPerm;
    // Work arrays, one complex value per layer (see method javadoc for layout).
    int layersNumber = asample.numberOfLayers + 1;
    double[][] Nz = new double[layersNumber][2];
    double[][] Nz_eps = new double[layersNumber][2];
    double[][] Rs = new double[layersNumber][2];
    double[][] Ts = new double[layersNumber][2];
    double[][] Rp = new double[layersNumber][2];
    double[][] Tp = new double[layersNumber][2];
    double[][] As = new double[layersNumber][2];
    double[][] Ap = new double[layersNumber][2];
    double[][] Xs = new double[layersNumber][2];
    double[][] Xp = new double[layersNumber][2];
    double[][] A2Xs = new double[layersNumber][2];
    double[][] A2Xp = new double[layersNumber][2];
    double[][] A2XRs = new double[layersNumber][2];
    double[][] A2XRp = new double[layersNumber][2];
    double[][] Er = new double[layersNumber][2];
    double[][] Et = new double[layersNumber][2];
    double[][] Hr = new double[layersNumber][2];
    double[][] Ht = new double[layersNumber][2];
    double[][] eps = new double[layersNumber][2];

    RadiationType radType = ainstrument.getRadiationType();
    int rad_lines = radType.getLinesCount();
    // Sum contributions of every wavelength line of the radiation.
    for (int ej = 0; ej < rad_lines; ej++) {
      double lambda = radType.getRadiationWavelength(ej);
      double energy_intensity = radType.getRadiationWeigth(ej);
      double constant1 = 2.0 * Constants.PI / lambda * 1E8;
      double energy = Constants.ENERGY_LAMBDA / lambda; // in eV
      double energyInKeV = energy * 0.001;

      // Per-layer complex permittivity deviation: eps = 1 - delta - i*beta.
      for (int j = 0; j < layersNumber; j++) {
        Layer layer = asample.getlayer(j - 1);
        double[] complexPermittivity = new double[2];
        if (layer == null) {
          // Incident medium (vacuum/air): no deviation from 1.
          complexPermittivity[0] = 0;
          complexPermittivity[1] = 0;
        } else {
          cPerm = layer.getComplexPermittivityDiff(energyInKeV);
          complexPermittivity[0] = cPerm[0];
          complexPermittivity[1] = cPerm[1];
        }
        eps[j][0] = 1.0 - complexPermittivity[0];
        eps[j][1] = -complexPermittivity[1];
      }

      // One reflectivity value per measured point.
      for (int k = adatafile.startingindex; k < adatafile.finalindex; k++) {
        double theta = adatafile.getXData(k) / 2; // data stored as 2-theta
        theta *= Constants.DEGTOPI;
        double cosPhi2 = Math.cos(theta);
        cosPhi2 *= cosPhi2;
        // Per-layer normal wave-vector components and thickness phase factors.
        for (int j = 0; j < layersNumber; j++) {
          Layer layer = asample.getlayer(j - 1);
          // Nz = sqrt(eps - cos^2(theta)), complex.
          double[] r1 = MoreMath.complexSqrt(eps[j][0] - cosPhi2, eps[j][1]);
          Nz[j][0] = r1[0];
          Nz[j][1] = r1[1];
          // Nz/eps is the p-polarization analogue of Nz.
          r1 = MoreMath.complexDivide(Nz[j], eps[j]);
          Nz_eps[j][0] = r1[0];
          Nz_eps[j][1] = r1[1];
          // Substrate (last index) and incident medium get zero thickness.
          double thickness = 0;
          if (layer != null && j != layersNumber - 1)
            thickness = layer.getThicknessInCm();
          thickness *= constant1;
          // As/Ap = exp(-i * thickness * Nz): phase + absorption across the layer.
          r1 = MoreMath.complexExp(thickness * Nz[j][1], -thickness * Nz[j][0]);
          As[j][0] = r1[0];
          As[j][1] = r1[1];
          r1 = MoreMath.complexExp(thickness * Nz_eps[j][1], -thickness * Nz_eps[j][0]);
          Ap[j][0] = r1[0];
          Ap[j][1] = r1[1];
        }
        // Fresnel reflection/transmission coefficients at each interface j|j+1,
        // damped by the interface roughness (exponential damping factors below).
        for (int j = 0; j < layersNumber - 1; j++) {
          Layer layer = asample.getlayer(j);
          double roughness = 0;
          if (layer != null)
            roughness = layer.getRoughnessInCm();
          double roughnessExponent = constant1 * roughness;
          roughnessExponent *= roughnessExponent;
          // s-polarization: r = (Nz_j - Nz_j+1)/(Nz_j + Nz_j+1) * exp(-2 sigma^2 Nz_j Nz_j+1)
          double[] diffN = MoreMath.complexAdd(Nz[j][0], Nz[j][1], -Nz[j + 1][0], -Nz[j + 1][1]);
          double[] sumN = MoreMath.complexAdd(Nz[j], Nz[j + 1]);
          double[] multN = MoreMath.complexMultiply(Nz[j], Nz[j + 1]);
          double r1 = -2.0 * roughnessExponent;
          multN[0] *= r1;
          multN[1] *= r1;
          double[] s1 = MoreMath.complexExp(multN);
          double[] firstR = MoreMath.complexDivide(diffN, sumN);
          firstR = MoreMath.complexMultiply(firstR, s1);
          Rs[j][0] = firstR[0];
          Rs[j][1] = firstR[1];
          // s-polarization transmission, with its own roughness factor.
          double r2 = roughnessExponent * 0.5;
          firstR = MoreMath.complexMultiply(diffN, diffN);
          firstR[0] *= r2;
          firstR[1] *= r2;
          s1 = MoreMath.complexExp(firstR);
          firstR = MoreMath.complexDivide(Nz[j], sumN);
          firstR = MoreMath.complexMultiply(firstR, s1);
          Ts[j][0] = 2.0 * firstR[0];
          Ts[j][1] = 2.0 * firstR[1];
          // Same expressions for p-polarization, using Nz/eps.
          diffN = MoreMath.complexAdd(Nz_eps[j][0], Nz_eps[j][1], -Nz_eps[j + 1][0], -Nz_eps[j + 1][1]);
          sumN = MoreMath.complexAdd(Nz_eps[j], Nz_eps[j + 1]);
          multN = MoreMath.complexMultiply(Nz_eps[j], Nz_eps[j + 1]);
          multN[0] *= r1;
          multN[1] *= r1;
          s1 = MoreMath.complexExp(multN);
          firstR = MoreMath.complexDivide(diffN, sumN);
          firstR = MoreMath.complexMultiply(firstR, s1);
          Rp[j][0] = firstR[0];
          Rp[j][1] = firstR[1];
          firstR = MoreMath.complexMultiply(diffN, diffN);
          firstR[0] *= r2;
          firstR[1] *= r2;
          s1 = MoreMath.complexExp(firstR);
          firstR = MoreMath.complexDivide(Nz_eps[j], sumN);
          firstR = MoreMath.complexMultiply(firstR, s1);
          Tp[j][0] = 2.0 * firstR[0];
          Tp[j][1] = 2.0 * firstR[1];
        }
        // Parratt recursion, from the substrate upwards:
        // X_j = (R_j + A_{j+1}^2 X_{j+1}) / (1 + R_j A_{j+1}^2 X_{j+1}).
        // Xs/Xp at the last index stay 0 (no reflection from below the substrate).
        for (int j = asample.numberOfLayers - 1; j >= 0; j--) {
          double[] A2s = MoreMath.complexMultiply(As[j + 1], As[j + 1]);
          double[] r1 = MoreMath.complexMultiply(A2s, Xs[j + 1]);
          A2Xs[j + 1][0] = r1[0];
          A2Xs[j + 1][1] = r1[1];
          r1 = MoreMath.complexMultiply(A2Xs[j + 1], Rs[j]);
          A2XRs[j + 1][0] = r1[0];
          A2XRs[j + 1][1] = r1[1];
          r1 = MoreMath.complexAdd(Rs[j], A2Xs[j + 1]);
          r1 = MoreMath.complexDivide(r1[0], r1[1], 1.0 + A2XRs[j + 1][0], A2XRs[j + 1][1]);
          Xs[j][0] = r1[0];
          Xs[j][1] = r1[1];
          double[] A2p = MoreMath.complexMultiply(Ap[j + 1], Ap[j + 1]);
          r1 = MoreMath.complexMultiply(A2p, Xp[j + 1]);
          A2Xp[j + 1][0] = r1[0];
          A2Xp[j + 1][1] = r1[1];
          r1 = MoreMath.complexMultiply(A2Xp[j + 1], Rp[j]);
          A2XRp[j + 1][0] = r1[0];
          A2XRp[j + 1][1] = r1[1];
          r1 = MoreMath.complexAdd(Rp[j], A2Xp[j + 1]);
          r1 = MoreMath.complexDivide(r1[0], r1[1], 1.0 + A2XRp[j + 1][0], A2XRp[j + 1][1]);
          Xp[j][0] = r1[0];
          Xp[j][1] = r1[1];
        }
        // Top-of-stack values for layer 0 (incident medium).
        double[] A2s = MoreMath.complexMultiply(As[0], As[0]);
        double[] r1 = MoreMath.complexMultiply(A2s, Xs[0]);
        A2Xs[0][0] = r1[0];
        A2Xs[0][1] = r1[1];
        double[] A2p = MoreMath.complexMultiply(Ap[0], Ap[0]);
        r1 = MoreMath.complexMultiply(A2p, Xp[0]);
        A2Xp[0][0] = r1[0];
        A2Xp[0][1] = r1[1];
        // Propagate the incident field (E for s, H for p) down the stack.
        // Imaginary parts of Et[0]/Ht[0] remain 0 from array initialization.
        Et[0][0] = 1.0;
        Ht[0][0] = 1.0;
        for (int j = 0; j < layersNumber - 1; j++) {
          r1 = MoreMath.complexMultiply(As[j], Et[j]);
          r1 = MoreMath.complexMultiply(r1, Ts[j]);
          if (j != layersNumber - 2)
            r1 = MoreMath.complexDivide(r1[0], r1[1], 1.0 + A2XRs[j + 1][0], A2XRs[j + 1][1]);
          Et[j + 1][0] = r1[0];
          Et[j + 1][1] = r1[1];
          r1 = MoreMath.complexMultiply(A2Xs[j], Et[j]);
          Er[j][0] = r1[0];
          Er[j][1] = r1[1];
          r1 = MoreMath.complexMultiply(Ap[j], Ht[j]);
          r1 = MoreMath.complexMultiply(r1, Tp[j]);
          if (j != layersNumber - 2)
            r1 = MoreMath.complexDivide(r1[0], r1[1], 1.0 + A2XRp[j + 1][0], A2XRp[j + 1][1]);
          Ht[j + 1][0] = r1[0];
          Ht[j + 1][1] = r1[1];
          r1 = MoreMath.complexMultiply(A2Xp[j], Ht[j]);
          Hr[j][0] = r1[0];
          Hr[j][1] = r1[1];
        }
        r1 = MoreMath.complexMultiply(A2Xs[layersNumber - 1], Et[layersNumber - 1]);
        Er[layersNumber - 1][0] = r1[0];
        Er[layersNumber - 1][1] = r1[1];
        r1 = MoreMath.complexMultiply(A2Xp[layersNumber - 1], Ht[layersNumber - 1]);
        Hr[layersNumber - 1][0] = r1[0];
        Hr[layersNumber - 1][1] = r1[1];
        // Reflected intensity: |Er0|^2 and |Hr0|^2 weighted by the s/p factors.
        double[] rE = MoreMath.complexConjugate(Er[0]);
        rE = MoreMath.complexMultiply(Er[0], rE);
        double Eint = s_factor * MoreMath.complexAbs(rE);
        rE = MoreMath.complexConjugate(Hr[0]);
        rE = MoreMath.complexMultiply(Hr[0], rE);
        Eint += p_factor * MoreMath.complexAbs(rE);
        reflectivity = incidentIntensity * Eint * energy_intensity;
        adatafile.addtoPhasesFit(k, reflectivity);
      }
    }
    // Apply instrumental broadening to the computed curve.
    adatafile.computeReflectivityBroadening(asample);
  }

  /** Returns the options dialog for editing this model's parameters. */
  public JOptionsDialog getOptionsDialog(Frame parent) {
    JOptionsDialog adialog = new JParratOptionsD(parent, this);
    return adialog;
  }

  /**
   * Options dialog exposing the single scale-factor parameter.
   */
  public class JParratOptionsD extends JOptionsDialog {

    JTextField scaleFactorTF; // edits parameterField[0] (the scale factor)

    public JParratOptionsD(Frame parent, XRDcat obj) {

      super(parent, obj);

      principalPanel.setLayout(new BorderLayout(3, 3));
      JPanel jp1 = new JPanel(new GridLayout(0, 2));
      principalPanel.add(jp1, BorderLayout.NORTH);
      JPanel jp2 = new JPanel(new FlowLayout());
      jp2.add(new JLabel("Scale factor: "));
      scaleFactorTF = new JTextField(Constants.FLOAT_FIELD);
      scaleFactorTF.setToolTipText("Use the scale factor to balance with the diffraction intensity");
      jp2.add(scaleFactorTF);
      jp1.add(jp2);

      setTitle("Parrat reflectivity options");
      initParameters();
      pack();
    }

    /** Loads the current parameter value into the text field and binds them. */
    public void initParameters() {
      scaleFactorTF.setText(parameterField[0].getValue());
      addComponenttolist(scaleFactorTF, parameterField[0]);
    }

    /** Writes the edited text-field value back into the parameter. */
    public void retrieveParameters() {
      parameterField[0].setValue(scaleFactorTF.getText());
    }

    public void dispose() {
      super.dispose();
    }

  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.converter;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.ObjectInput;
import java.io.ObjectInputStream;
import java.io.ObjectOutput;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.StringReader;
import java.io.UnsupportedEncodingException;
import java.io.Writer;
import java.net.URL;
import java.nio.charset.Charset;
import java.nio.charset.UnsupportedCharsetException;
import org.apache.camel.Converter;
import org.apache.camel.Exchange;
import org.apache.camel.util.IOHelper;
import org.apache.camel.util.ObjectHelper;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Some core java.io based <a
* href="http://camel.apache.org/type-converter.html">Type Converters</a>
*
* @version $Revision$
*/
@Converter
public final class IOConverter {
    private static final transient Log LOG = LogFactory.getLog(IOConverter.class);

    /**
     * Utility classes should not have a public constructor.
     */
    private IOConverter() {
    }

    /**
     * Opens an {@link InputStream} reading from the given URL.
     */
    @Converter
    public static InputStream toInputStream(URL url) throws IOException {
        return url.openStream();
    }

    /**
     * Opens a buffered {@link InputStream} reading from the given file.
     */
    @Converter
    public static InputStream toInputStream(File file) throws IOException {
        return new BufferedInputStream(new FileInputStream(file));
    }

    /**
     * @deprecated use {@link #toReader(File, Exchange)} so the exchange charset is honored
     */
    @Deprecated
    public static BufferedReader toReader(File file) throws IOException {
        return toReader(file, null);
    }

    /**
     * Creates a buffered reader for the file, using the charset resolved from
     * the exchange (or the default charset when the exchange is null).
     */
    @Converter
    public static BufferedReader toReader(File file, Exchange exchange) throws IOException {
        return new BufferedReader(new EncodingFileReader(file, getCharsetName(exchange)));
    }

    /**
     * Converts a file name to a {@link File}. Does not touch the file system,
     * so no exception is actually thrown despite the declared one (kept for
     * backward compatibility with existing callers).
     */
    @Converter
    public static File toFile(String name) throws FileNotFoundException {
        return new File(name);
    }

    /**
     * Opens a buffered {@link OutputStream} writing to the given file.
     */
    @Converter
    public static OutputStream toOutputStream(File file) throws FileNotFoundException {
        return new BufferedOutputStream(new FileOutputStream(file));
    }

    /**
     * @deprecated use {@link #toWriter(File, Exchange)} so the exchange charset is honored
     */
    @Deprecated
    public static BufferedWriter toWriter(File file) throws IOException {
        return toWriter(file, null);
    }

    /**
     * Creates a buffered writer for the file, using the charset resolved from
     * the exchange (or the default charset when the exchange is null).
     */
    @Converter
    public static BufferedWriter toWriter(File file, Exchange exchange) throws IOException {
        return new BufferedWriter(new EncodingFileWriter(file, getCharsetName(exchange)));
    }

    /**
     * @deprecated use {@link #toReader(InputStream, Exchange)} so the exchange charset is honored
     */
    @Deprecated
    public static Reader toReader(InputStream in) throws IOException {
        return toReader(in, null);
    }

    /**
     * Wraps the stream in a {@link Reader} using the exchange charset.
     */
    @Converter
    public static Reader toReader(InputStream in, Exchange exchange) throws IOException {
        return new InputStreamReader(in, getCharsetName(exchange));
    }

    /**
     * @deprecated use {@link #toWriter(OutputStream, Exchange)} so the exchange charset is honored
     */
    @Deprecated
    public static Writer toWriter(OutputStream out) throws IOException {
        return toWriter(out, null);
    }

    /**
     * Wraps the stream in a {@link Writer} using the exchange charset.
     */
    @Converter
    @Deprecated
    public static Writer toWriter(OutputStream out, Exchange exchange) throws IOException {
        return new OutputStreamWriter(out, getCharsetName(exchange));
    }

    /**
     * Wraps the text in a {@link StringReader}.
     */
    @Converter
    public static StringReader toReader(String text) {
        return new StringReader(text);
    }

    /**
     * @deprecated use {@link #toInputStream(String, Exchange)} so the exchange charset is honored
     */
    @Deprecated
    public static InputStream toInputStream(String text) throws IOException {
        return toInputStream(text, null);
    }

    /**
     * Encodes the text with the exchange charset and wraps the bytes in a stream.
     */
    @Converter
    public static InputStream toInputStream(String text, Exchange exchange) throws IOException {
        return toInputStream(text.getBytes(getCharsetName(exchange)));
    }

    /**
     * @deprecated use {@link #toInputStream(BufferedReader, Exchange)} so the exchange charset is honored
     */
    @Deprecated
    public static InputStream toInputStream(BufferedReader buffer) throws IOException {
        return toInputStream(buffer, null);
    }

    /**
     * Fully reads the reader (closing it) and re-encodes the content as a stream.
     */
    @Converter
    public static InputStream toInputStream(BufferedReader buffer, Exchange exchange) throws IOException {
        return toInputStream(toString(buffer), exchange);
    }

    /**
     * @deprecated use {@link #toString(byte[], Exchange)} so the exchange charset is honored
     */
    @Deprecated
    public static String toString(byte[] data) throws IOException {
        return toString(data, null);
    }

    /**
     * Decodes the bytes using the exchange charset.
     */
    @Converter
    public static String toString(byte[] data, Exchange exchange) throws IOException {
        return new String(data, getCharsetName(exchange));
    }

    /**
     * @deprecated use {@link #toString(File, Exchange)} so the exchange charset is honored
     */
    @Deprecated
    public static String toString(File file) throws IOException {
        return toString(file, null);
    }

    /**
     * Reads the whole file as a string using the exchange charset.
     */
    @Converter
    public static String toString(File file, Exchange exchange) throws IOException {
        return toString(toReader(file, exchange));
    }

    /**
     * Reads the whole file into a byte array, closing the stream afterwards.
     */
    @Converter
    public static byte[] toByteArray(File file) throws IOException {
        InputStream is = toInputStream(file);
        try {
            return toBytes(is);
        } finally {
            IOHelper.close(is, "file", LOG);
        }
    }

    /**
     * @deprecated use {@link #toByteArray(Reader, Exchange)} so the exchange charset is honored
     */
    @Deprecated
    public static byte[] toByteArray(Reader reader) throws IOException {
        return toByteArray(reader, null);
    }

    /**
     * Fully reads the reader (closing it) and encodes the content using the
     * exchange charset. Returns <tt>null</tt> for a <tt>null</tt> reader.
     */
    @Converter
    public static byte[] toByteArray(Reader reader, Exchange exchange) throws IOException {
        if (reader == null) {
            // be consistent with toString(BufferedReader), which is null-safe;
            // previously new BufferedReader(null) threw an immediate NPE
            return null;
        }
        if (reader instanceof BufferedReader) {
            return toByteArray((BufferedReader)reader, exchange);
        } else {
            return toByteArray(new BufferedReader(reader), exchange);
        }
    }

    /**
     * @deprecated use {@link #toString(URL, Exchange)} so the exchange charset is honored
     */
    @Deprecated
    public static String toString(URL url) throws IOException {
        return toString(url, null);
    }

    /**
     * Reads the URL content as a string using the exchange charset,
     * closing the underlying stream afterwards.
     */
    @Converter
    public static String toString(URL url, Exchange exchange) throws IOException {
        InputStream is = toInputStream(url);
        try {
            return toString(is, exchange);
        } finally {
            IOHelper.close(is, "url", LOG);
        }
    }

    /**
     * Fully reads the reader (closing it) into a string.
     * Returns <tt>null</tt> for a <tt>null</tt> reader.
     */
    @Converter
    public static String toString(Reader reader) throws IOException {
        if (reader == null) {
            // align with toString(BufferedReader), which returns null for null input;
            // previously new BufferedReader(null) threw an immediate NPE
            return null;
        }
        if (reader instanceof BufferedReader) {
            return toString((BufferedReader)reader);
        } else {
            return toString(new BufferedReader(reader));
        }
    }

    /**
     * Fully reads the reader into a string and closes it, even on failure.
     * Returns <tt>null</tt> for a <tt>null</tt> reader.
     */
    @Converter
    public static String toString(BufferedReader reader) throws IOException {
        if (reader == null) {
            return null;
        }
        StringBuilder sb = new StringBuilder(1024);
        char[] buf = new char[1024];
        try {
            int len = 0;
            // read until we reach then end which is the -1 marker
            while (len != -1) {
                len = reader.read(buf);
                if (len != -1) {
                    sb.append(buf, 0, len);
                }
            }
        } finally {
            IOHelper.close(reader, "reader", LOG);
        }
        return sb.toString();
    }

    /**
     * @deprecated use {@link #toByteArray(BufferedReader, Exchange)} so the exchange charset is honored
     */
    @Deprecated
    public static byte[] toByteArray(BufferedReader reader) throws IOException {
        return toByteArray(reader, null);
    }

    /**
     * Fully reads the reader (closing it) and encodes the content with the exchange charset.
     */
    @Converter
    public static byte[] toByteArray(BufferedReader reader, Exchange exchange) throws IOException {
        return toByteArray(toString(reader), exchange);
    }

    /**
     * @deprecated use {@link #toByteArray(String, Exchange)} so the exchange charset is honored
     */
    @Deprecated
    public static byte[] toByteArray(String value) throws IOException {
        return toByteArray(value, null);
    }

    /**
     * Encodes the string with the exchange charset; <tt>null</tt> in, <tt>null</tt> out.
     */
    @Converter
    public static byte[] toByteArray(String value, Exchange exchange) throws IOException {
        return value != null ? value.getBytes(getCharsetName(exchange)) : null;
    }

    /**
     * @deprecated use {@link #toString(InputStream, Exchange)} so the exchange charset is honored
     */
    @Deprecated
    public static String toString(InputStream in) throws IOException {
        return toString(in, null);
    }

    /**
     * Fully reads the stream (closing it) into a string using the exchange charset.
     */
    @Converter
    public static String toString(InputStream in, Exchange exchange) throws IOException {
        return toString(toReader(in, exchange));
    }

    /**
     * Wraps the bytes in a {@link ByteArrayInputStream}.
     */
    @Converter
    public static InputStream toInputStream(byte[] data) {
        return new ByteArrayInputStream(data);
    }

    /**
     * Adapts the stream to {@link ObjectOutput}, wrapping it in an
     * {@link ObjectOutputStream} when necessary.
     */
    @Converter
    public static ObjectOutput toObjectOutput(OutputStream stream) throws IOException {
        if (stream instanceof ObjectOutput) {
            return (ObjectOutput) stream;
        } else {
            return new ObjectOutputStream(stream);
        }
    }

    /**
     * Adapts the stream to {@link ObjectInput}, wrapping it in an
     * {@link ObjectInputStream} when necessary.
     */
    @Converter
    public static ObjectInput toObjectInput(InputStream stream) throws IOException {
        if (stream instanceof ObjectInput) {
            return (ObjectInput) stream;
        } else {
            return new ObjectInputStream(stream);
        }
    }

    /**
     * Copies the whole stream into a byte array. The input stream is not
     * closed here; callers such as {@link #toByteArray(File)} close it.
     */
    @Converter
    public static byte[] toBytes(InputStream stream) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try {
            IOHelper.copy(stream, bos);
            return bos.toByteArray();
        } finally {
            IOHelper.close(bos, "stream", LOG);
        }
    }

    /**
     * Snapshots the buffer content as a byte array.
     */
    @Converter
    public static byte[] toByteArray(ByteArrayOutputStream os) {
        return os.toByteArray();
    }

    /**
     * @deprecated use {@link #toString(ByteArrayOutputStream, Exchange)} so the exchange charset is honored
     */
    @Deprecated
    public static String toString(ByteArrayOutputStream os) throws IOException {
        return toString(os, null);
    }

    /**
     * Decodes the buffer content using the exchange charset.
     */
    @Converter
    public static String toString(ByteArrayOutputStream os, Exchange exchange) throws IOException {
        return os.toString(getCharsetName(exchange));
    }

    /**
     * Snapshots the buffer content and wraps it in an input stream.
     */
    @Converter
    public static InputStream toInputStream(ByteArrayOutputStream os) {
        return new ByteArrayInputStream(os.toByteArray());
    }

    /**
     * Gets the charset name from the exchange, falling back to the JVM default.
     */
    public static String getCharsetName(Exchange exchange) {
        return getCharsetName(exchange, true);
    }

    /**
     * Gets the charset name if set as property {@link Exchange#CHARSET_NAME}.
     *
     * @param exchange the exchange
     * @param useDefault should we fallback and use JVM default charset if no property existed?
     * @return the charset, or <tt>null</tt> if no found
     */
    public static String getCharsetName(Exchange exchange, boolean useDefault) {
        if (exchange != null) {
            String charsetName = exchange.getProperty(Exchange.CHARSET_NAME, String.class);
            if (charsetName != null) {
                // strip any surrounding quotes before use
                return IOConverter.normalizeCharset(charsetName);
            }
        }
        if (useDefault) {
            return getDefaultCharsetName();
        } else {
            return null;
        }
    }

    /**
     * Returns the configured default charset (system property
     * {@link Exchange#DEFAULT_CHARSET_PROPERTY}), or UTF-8 when unset.
     */
    public static String getDefaultCharsetName() {
        return ObjectHelper.getSystemProperty(Exchange.DEFAULT_CHARSET_PROPERTY, "UTF-8");
    }

    /**
     * Encoding-aware file reader.
     */
    private static class EncodingFileReader extends InputStreamReader {

        /**
         * @param file file to read
         * @param charset character set to use
         */
        public EncodingFileReader(File file, String charset)
            throws FileNotFoundException, UnsupportedEncodingException {
            super(new FileInputStream(file), charset);
        }

    }

    /**
     * Encoding-aware file writer.
     */
    private static class EncodingFileWriter extends OutputStreamWriter {

        /**
         * @param file file to write
         * @param charset character set to use
         */
        public EncodingFileWriter(File file, String charset)
            throws FileNotFoundException, UnsupportedEncodingException {
            super(new FileOutputStream(file), charset);
        }

    }

    /**
     * This method will take off the quotes and double quotes of the charset.
     * Returns <tt>null</tt> for a <tt>null</tt> input.
     */
    public static String normalizeCharset(String charset) {
        if (charset != null) {
            String answer = charset.trim();
            if (answer.startsWith("'") || answer.startsWith("\"")) {
                answer = answer.substring(1);
            }
            if (answer.endsWith("'") || answer.endsWith("\"")) {
                answer = answer.substring(0, answer.length() - 1);
            }
            return answer.trim();
        } else {
            return null;
        }
    }

    /**
     * Validates that the given charset name is supported by this JVM.
     *
     * @param charset the charset name to validate
     * @throws UnsupportedCharsetException if the charset is <tt>null</tt> or not supported
     *         (note: {@link Charset#isSupported(String)} may also throw
     *         {@link java.nio.charset.IllegalCharsetNameException} for syntactically
     *         illegal names)
     */
    public static void validateCharset(String charset) throws UnsupportedCharsetException {
        if (charset != null) {
            if (Charset.isSupported(charset)) {
                Charset.forName(charset);
                return;
            }
        }
        throw new UnsupportedCharsetException(charset);
    }
}
| |
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.mixin.core.network;
import com.flowpowered.math.vector.Vector3d;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.item.EntityItem;
import net.minecraft.entity.item.EntityXPOrb;
import net.minecraft.entity.passive.AbstractChestHorse;
import net.minecraft.entity.passive.EntityPig;
import net.minecraft.entity.passive.EntitySheep;
import net.minecraft.entity.passive.EntityWolf;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.entity.projectile.EntityArrow;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.inventory.Slot;
import net.minecraft.item.Item;
import net.minecraft.item.ItemBlock;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.network.NetHandlerPlayServer;
import net.minecraft.network.NetworkManager;
import net.minecraft.network.Packet;
import net.minecraft.network.PacketThreadUtil;
import net.minecraft.network.datasync.DataParameter;
import net.minecraft.network.play.INetHandlerPlayServer;
import net.minecraft.network.play.client.CPacketClickWindow;
import net.minecraft.network.play.client.CPacketCreativeInventoryAction;
import net.minecraft.network.play.client.CPacketPlayer;
import net.minecraft.network.play.client.CPacketResourcePackStatus;
import net.minecraft.network.play.client.CPacketUpdateSign;
import net.minecraft.network.play.client.CPacketUseEntity;
import net.minecraft.network.play.client.CPacketVehicleMove;
import net.minecraft.network.play.server.SPacketEntityAttach;
import net.minecraft.network.play.server.SPacketMoveVehicle;
import net.minecraft.network.play.server.SPacketPlayerListItem;
import net.minecraft.network.play.server.SPacketResourcePackSend;
import net.minecraft.network.play.server.SPacketSetSlot;
import net.minecraft.server.MinecraftServer;
import net.minecraft.server.management.PlayerInteractionManager;
import net.minecraft.server.management.PlayerList;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.tileentity.TileEntitySign;
import net.minecraft.util.EnumActionResult;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumHand;
import net.minecraft.util.IntHashMap;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.RayTraceResult;
import net.minecraft.util.math.Vec3d;
import net.minecraft.util.text.ITextComponent;
import net.minecraft.util.text.TextComponentString;
import net.minecraft.util.text.TextComponentTranslation;
import net.minecraft.world.WorldServer;
import org.apache.logging.log4j.Logger;
import org.spongepowered.api.Sponge;
import org.spongepowered.api.block.tileentity.Sign;
import org.spongepowered.api.data.manipulator.mutable.tileentity.SignData;
import org.spongepowered.api.data.value.mutable.ListValue;
import org.spongepowered.api.entity.Transform;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.event.CauseStackManager;
import org.spongepowered.api.event.SpongeEventFactory;
import org.spongepowered.api.event.block.tileentity.ChangeSignEvent;
import org.spongepowered.api.event.cause.EventContextKeys;
import org.spongepowered.api.event.entity.MoveEntityEvent;
import org.spongepowered.api.event.item.inventory.ClickInventoryEvent;
import org.spongepowered.api.event.message.MessageEvent;
import org.spongepowered.api.event.network.ClientConnectionEvent;
import org.spongepowered.api.network.PlayerConnection;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.channel.MessageChannel;
import org.spongepowered.api.world.Location;
import org.spongepowered.api.world.World;
import org.spongepowered.asm.mixin.Final;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Overwrite;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.Redirect;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.asm.mixin.injection.callback.LocalCapture;
import org.spongepowered.common.SpongeImpl;
import org.spongepowered.common.SpongeImplHooks;
import org.spongepowered.common.entity.player.tab.SpongeTabList;
import org.spongepowered.common.event.SpongeCommonEventFactory;
import org.spongepowered.common.event.tracking.PhaseTracker;
import org.spongepowered.common.event.tracking.PhaseData;
import org.spongepowered.common.event.tracking.phase.packet.PacketContext;
import org.spongepowered.common.event.tracking.phase.packet.PacketPhaseUtil;
import org.spongepowered.common.event.tracking.phase.tick.PlayerTickContext;
import org.spongepowered.common.event.tracking.phase.tick.TickPhase;
import org.spongepowered.common.interfaces.IMixinContainer;
import org.spongepowered.common.interfaces.IMixinNetworkManager;
import org.spongepowered.common.interfaces.entity.player.IMixinEntityPlayerMP;
import org.spongepowered.common.interfaces.entity.player.IMixinInventoryPlayer;
import org.spongepowered.common.interfaces.network.IMixinNetHandlerPlayServer;
import org.spongepowered.common.item.inventory.util.ItemStackUtil;
import org.spongepowered.common.text.SpongeTexts;
import org.spongepowered.common.util.VecHelper;
import java.net.InetSocketAddress;
import java.util.Deque;
import java.util.LinkedList;
import java.util.Optional;
import java.util.Set;
import javax.annotation.Nullable;
@Mixin(NetHandlerPlayServer.class)
public abstract class MixinNetHandlerPlayServer implements PlayerConnection, IMixinNetHandlerPlayServer {
private static final String UPDATE_SIGN = "Lnet/minecraft/network/play/client/CPacketUpdateSign;getLines()[Ljava/lang/String;";
@Shadow @Final private static Logger LOGGER;
@Shadow @Final public NetworkManager netManager;
@Shadow @Final private MinecraftServer serverController;
@Shadow @Final private IntHashMap<Short> pendingTransactions;
@Shadow public EntityPlayerMP player;
@Shadow private Entity lowestRiddenEnt;
@Shadow private int itemDropThreshold;
@Shadow private double firstGoodX;
@Shadow private double firstGoodY;
@Shadow private double firstGoodZ;
@Shadow private double lastGoodX;
@Shadow private double lastGoodY;
@Shadow private double lastGoodZ;
@Shadow private int lastPositionUpdate;
@Shadow private Vec3d targetPos;
@Shadow private int networkTickCount;
@Shadow private int movePacketCounter;
@Shadow private int lastMovePacketCounter;
@Shadow private boolean floating;
@Shadow public abstract void sendPacket(final Packet<?> packetIn);
@Shadow public abstract void disconnect(ITextComponent reason);
@Shadow private void captureCurrentPosition() {}
@Shadow public abstract void setPlayerLocation(double x, double y, double z, float yaw, float pitch);
@Shadow private static boolean isMovePlayerPacketInvalid(CPacketPlayer packetIn) { return false; } // Shadowed
private boolean justTeleported = false;
@Nullable private Location<World> lastMoveLocation = null;
private final Deque<SPacketResourcePackSend> resourcePackRequests = new LinkedList<>();
// Store the last block right-clicked
@Nullable private Item lastItem;
@Override
public void captureCurrentPlayerPosition() {
this.captureCurrentPosition();
}
@Override
public Deque<SPacketResourcePackSend> getPendingResourcePackQueue() {
return this.resourcePackRequests;
}
@Override
public Player getPlayer() {
return (Player) this.player;
}
@Override
public InetSocketAddress getAddress() {
return ((IMixinNetworkManager) this.netManager).getAddress();
}
@Override
public InetSocketAddress getVirtualHost() {
return ((IMixinNetworkManager) this.netManager).getVirtualHost();
}
@Override
public int getLatency() {
return this.player.ping;
}
@Redirect(method = "update", at = @At(value = "INVOKE", target = "Lnet/minecraft/entity/player/EntityPlayerMP;onUpdateEntity()V"))
private void onPlayerTick(EntityPlayerMP player) {
if (player.world.isRemote) {
player.onUpdateEntity();
return;
}
try (CauseStackManager.StackFrame frame = Sponge.getCauseStackManager().pushCauseFrame();
PlayerTickContext context = TickPhase.Tick.PLAYER.createPhaseContext()
.source(player)
.buildAndSwitch()) {
Sponge.getCauseStackManager().pushCause(player);
player.onUpdateEntity();
}
}
/**
* @param manager The player network connection
* @param packet The original packet to be sent
* @author kashike
*/
@Redirect(method = "sendPacket(Lnet/minecraft/network/Packet;)V", at = @At(value = "INVOKE", target = "Lnet/minecraft/network/NetworkManager;sendPacket(Lnet/minecraft/network/Packet;)V"))
public void onSendPacket(NetworkManager manager, Packet<?> packet) {
packet = this.rewritePacket(packet);
if (packet != null) {
manager.sendPacket(packet);
}
}
/**
* This method wraps packets being sent to perform any additional actions,
* such as rewriting data in the packet.
*
* @param packetIn The original packet to be sent
* @return The rewritten packet if we performed any changes, the original
* packet if we did not perform any changes, or {@code null} to not
* send anything
* @author kashike
*/
@Nullable
private Packet<?> rewritePacket(final Packet<?> packetIn) {
// Update the tab list data
if (packetIn instanceof SPacketPlayerListItem) {
((SpongeTabList) ((Player) this.player).getTabList()).updateEntriesOnSend((SPacketPlayerListItem) packetIn);
}
// Store the resource pack for use when processing resource pack statuses
else if (packetIn instanceof SPacketResourcePackSend) {
SPacketResourcePackSend packet = (SPacketResourcePackSend) packetIn;
if (this.resourcePackRequests.isEmpty()) {
this.resourcePackRequests.add(packet);
return packet;
}
if (this.resourcePackRequests.contains(packet)) {
// This must be a resend.
return packet;
}
this.resourcePackRequests.add(packet);
return null;
}
return packetIn;
}
/**
* @author Zidane
*
* Invoke before {@code System.arraycopy(packetIn.getLines(), 0, tileentitysign.signText, 0, 4);} (line 1156 in source) to call SignChangeEvent.
* @param packetIn Injected packet param
* @param ci Info to provide mixin on how to handle the callback
* @param worldserver Injected world param
* @param blockpos Injected blockpos param
* @param tileentity Injected tilentity param
* @param tileentitysign Injected tileentitysign param
*/
@Inject(method = "processUpdateSign", at = @At(value = "INVOKE", target = UPDATE_SIGN), cancellable = true, locals = LocalCapture.CAPTURE_FAILHARD)
public void callSignChangeEvent(CPacketUpdateSign packetIn, CallbackInfo ci, WorldServer worldserver, BlockPos blockpos, IBlockState iblockstate, TileEntity tileentity, TileEntitySign tileentitysign) {
ci.cancel();
final Optional<SignData> existingSignData = ((Sign) tileentitysign).get(SignData.class);
if (!existingSignData.isPresent()) {
// TODO Unsure if this is the best to do here...
throw new RuntimeException("Critical error! Sign data not present on sign!");
}
final SignData changedSignData = existingSignData.get().copy();
final ListValue<Text> lines = changedSignData.lines();
for (int i = 0; i < packetIn.getLines().length; i++) {
lines.set(i, SpongeTexts.toText(new TextComponentString(packetIn.getLines()[i])));
}
changedSignData.set(lines);
// I pass changedSignData in here twice to emulate the fact that even-though the current sign data doesn't have the lines from the packet
// applied, this is what it "is" right now. If the data shown in the world is desired, it can be fetched from Sign.getData
Sponge.getCauseStackManager().pushCause(this.player);
final ChangeSignEvent event =
SpongeEventFactory.createChangeSignEvent(Sponge.getCauseStackManager().getCurrentCause(),
changedSignData.asImmutable(), changedSignData, (Sign) tileentitysign);
if (!SpongeImpl.postEvent(event)) {
((Sign) tileentitysign).offer(event.getText());
} else {
// If cancelled, I set the data back that was fetched from the sign. This means that if its a new sign, the sign will be empty else
// it will be the text of the sign that was showing in the world
((Sign) tileentitysign).offer(existingSignData.get());
}
Sponge.getCauseStackManager().popCause();
tileentitysign.markDirty();
worldserver.getPlayerChunkMap().markBlockForUpdate(blockpos);
}
/**
* @author blood - June 6th, 2016
* @author gabizou - June 20th, 2016 - Update for 1.9.4 and minor refactors.
* @reason Since mojang handles creative packets different than survival, we need to
* restructure this method to prevent any packets being sent to client as we will
* not be able to properly revert them during drops.
*
* @param packetIn The creative inventory packet
*/
@Overwrite
public void processCreativeInventoryAction(CPacketCreativeInventoryAction packetIn) {
PacketThreadUtil.checkThreadAndEnqueue(packetIn, (NetHandlerPlayServer) (Object) this, this.player.getServerWorld());
if (this.player.interactionManager.isCreative()) {
final PhaseData peek = PhaseTracker.getInstance().getCurrentPhaseData();
final PacketContext<?> context = (PacketContext<?>) peek.context;
final boolean ignoresCreative = context.getIgnoringCreative();
boolean clickedOutside = packetIn.getSlotId() < 0;
ItemStack itemstack = packetIn.getStack();
if (!itemstack.isEmpty() && itemstack.hasTagCompound() && itemstack.getTagCompound().hasKey("BlockEntityTag", 10)) {
NBTTagCompound nbttagcompound = itemstack.getTagCompound().getCompoundTag("BlockEntityTag");
if (nbttagcompound.hasKey("x") && nbttagcompound.hasKey("y") && nbttagcompound.hasKey("z")) {
BlockPos blockpos = new BlockPos(nbttagcompound.getInteger("x"), nbttagcompound.getInteger("y"), nbttagcompound.getInteger("z"));
TileEntity tileentity = this.player.world.getTileEntity(blockpos);
if (tileentity != null) {
NBTTagCompound nbttagcompound1 = new NBTTagCompound();
tileentity.writeToNBT(nbttagcompound1);
nbttagcompound1.removeTag("x");
nbttagcompound1.removeTag("y");
nbttagcompound1.removeTag("z");
itemstack.setTagInfo("BlockEntityTag", nbttagcompound1);
}
}
}
boolean clickedHotbar = packetIn.getSlotId() >= 1 && packetIn.getSlotId() <= 45;
boolean itemValidCheck = itemstack.isEmpty() || itemstack.getMetadata() >= 0 && itemstack.getCount() <= 64 && !itemstack.isEmpty();
// Sponge start - handle CreativeInventoryEvent
if (itemValidCheck) {
if (!ignoresCreative) {
ClickInventoryEvent.Creative clickEvent = SpongeCommonEventFactory.callCreativeClickInventoryEvent(this.player, packetIn);
if (clickEvent.isCancelled()) {
// Reset slot on client
if (packetIn.getSlotId() >= 0 && packetIn.getSlotId() < this.player.inventoryContainer.inventorySlots.size()) {
this.player.connection.sendPacket(
new SPacketSetSlot(this.player.inventoryContainer.windowId, packetIn.getSlotId(),
this.player.inventoryContainer.getSlot(packetIn.getSlotId()).getStack()));
this.player.connection.sendPacket(new SPacketSetSlot(-1, -1, ItemStack.EMPTY));
}
return;
}
}
if (clickedHotbar) {
if (itemstack.isEmpty()) {
this.player.inventoryContainer.putStackInSlot(packetIn.getSlotId(), ItemStack.EMPTY);
} else {
this.player.inventoryContainer.putStackInSlot(packetIn.getSlotId(), itemstack);
}
this.player.inventoryContainer.setCanCraft(this.player, true);
} else if (clickedOutside && this.itemDropThreshold < 200) {
this.itemDropThreshold += 20;
EntityItem entityitem = this.player.dropItem(itemstack, true);
if (entityitem != null)
{
entityitem.setAgeToCreativeDespawnTime();
}
}
}
// Sponge end
}
}
@Inject(method = "processClickWindow", at = @At(value = "INVOKE", target = "Lnet/minecraft/util/IntHashMap;addKey(ILjava/lang/Object;)V"))
public void onInvalidClick(CPacketClickWindow packet, CallbackInfo ci) {
// We want to treat an 'invalid' click just like a regular click - we still fire events, do restores, etc.
// Vanilla doesn't call detectAndSendChanges for 'invalid' clicks, since it restores the entire inventory
// Passing 'captureOnly' as 'true' allows capturing to happen for event firing, but doesn't send any pointless packets
((IMixinContainer) this.player.openContainer).detectAndSendChanges(true);
}
@Redirect(method = "processChatMessage", at = @At(value = "INVOKE", target = "Lorg/apache/commons/lang3/StringUtils;normalizeSpace(Ljava/lang/String;)Ljava/lang/String;", remap = false))
public String onNormalizeSpace(String input) {
return input;
}
@Inject(method = "setPlayerLocation(DDDFFLjava/util/Set;)V", at = @At(value = "RETURN"))
public void setPlayerLocation(double x, double y, double z, float yaw, float pitch, Set<?> relativeSet, CallbackInfo ci) {
this.justTeleported = true;
}
/**
* @author gabizou - June 22nd, 2016
* @reason Sponge has to throw the movement events before we consider moving the player and there's
* no clear way to go about it with the target position being null and the last position update checks.
* @param packetIn
*/
@Redirect(method = "processPlayer", at = @At(value = "FIELD", target = "Lnet/minecraft/entity/player/EntityPlayerMP;queuedEndExit:Z"))
private boolean throwMoveEvent(EntityPlayerMP playerMP, CPacketPlayer packetIn) {
if (!playerMP.queuedEndExit) {
// During login, minecraft sends a packet containing neither the 'moving' or 'rotating' flag set - but only once.
// We don't fire an event to avoid confusing plugins.
if (!packetIn.moving && !packetIn.rotating) {
return playerMP.queuedEndExit;
}
// Sponge Start - Movement event
Player player = (Player) this.player;
IMixinEntityPlayerMP mixinPlayer = (IMixinEntityPlayerMP) this.player;
Vector3d fromrot = player.getRotation();
// If Sponge used the player's current location, the delta might never be triggered which could be exploited
Location<World> from = player.getLocation();
if (this.lastMoveLocation != null) {
from = this.lastMoveLocation;
}
Vector3d torot = new Vector3d(packetIn.pitch, packetIn.yaw, 0);
Location<World> to = new Location<>(player.getWorld(), packetIn.x, packetIn.y, packetIn.z);
// Minecraft sends a 0, 0, 0 position when rotation only update occurs, this needs to be recognized and corrected
boolean rotationOnly = !packetIn.moving && packetIn.rotating;
if (rotationOnly) {
// Correct the to location so it's not misrepresented to plugins, only when player rotates without moving
// In this case it's only a rotation update, which isn't related to the to location
from = player.getLocation();
to = from;
}
// Minecraft does the same with rotation when it's only a positional update
boolean positionOnly = packetIn.moving && !packetIn.rotating;
if (positionOnly) {
// Correct the new rotation to match the old rotation
torot = fromrot;
}
((IMixinEntityPlayerMP) this.player).setVelocityOverride(to.getPosition().sub(from.getPosition()));
double deltaSquared = to.getPosition().distanceSquared(from.getPosition());
double deltaAngleSquared = fromrot.distanceSquared(torot);
// These magic numbers are sad but help prevent excessive lag from this event.
// eventually it would be nice to not have them
if (deltaSquared > ((1f / 16) * (1f / 16)) || deltaAngleSquared > (.15f * .15f)) {
Transform<World> fromTransform = player.getTransform().setLocation(from).setRotation(fromrot);
Transform<World> toTransform = player.getTransform().setLocation(to).setRotation(torot);
Sponge.getCauseStackManager().pushCause(player);
MoveEntityEvent event = SpongeEventFactory.createMoveEntityEvent(Sponge.getCauseStackManager().getCurrentCause(), fromTransform, toTransform, player);
SpongeImpl.postEvent(event);
Sponge.getCauseStackManager().popCause();
if (event.isCancelled()) {
mixinPlayer.setLocationAndAngles(fromTransform);
this.lastMoveLocation = from;
((IMixinEntityPlayerMP) this.player).setVelocityOverride(null);
return true;
} else if (!event.getToTransform().equals(toTransform)) {
mixinPlayer.setLocationAndAngles(event.getToTransform());
this.lastMoveLocation = event.getToTransform().getLocation();
((IMixinEntityPlayerMP) this.player).setVelocityOverride(null);
return true;
} else if (!from.equals(player.getLocation()) && this.justTeleported) {
this.lastMoveLocation = player.getLocation();
// Prevent teleports during the move event from causing odd behaviors
this.justTeleported = false;
((IMixinEntityPlayerMP) this.player).setVelocityOverride(null);
return true;
} else {
this.lastMoveLocation = event.getToTransform().getLocation();
}
this.resendLatestResourcePackRequest();
}
}
return playerMP.queuedEndExit;
}
/**
* @author gabizou - June 22nd, 2016
* @author blood - May 6th, 2017
* @reason Redirects the {@link Entity#getLowestRidingEntity()} call to throw our
* {@link MoveEntityEvent}. The peculiarity of this redirect is that the entity
* returned is perfectly valid to be {@link this#player} since, if the player
* is NOT riding anything, the lowest riding entity is themselves. This way, if
* the event is cancelled, the player can be returned instead of the actual riding
* entity.
*
* @param playerMP The player
* @param packetIn The packet movement
* @return The lowest riding entity
*/
@Redirect(method = "processVehicleMove", at = @At(value = "INVOKE", target = "Lnet/minecraft/entity/player/EntityPlayerMP;getLowestRidingEntity()Lnet/minecraft/entity/Entity;"))
private Entity processVehicleMoveEvent(EntityPlayerMP playerMP, CPacketVehicleMove packetIn) {
final Entity ridingEntity = this.player.getLowestRidingEntity();
if (ridingEntity == this.player || ridingEntity.getControllingPassenger() != this.player || ridingEntity != this.lowestRiddenEnt) {
return ridingEntity;
}
// Sponge Start - Movement event
org.spongepowered.api.entity.Entity spongeEntity = (org.spongepowered.api.entity.Entity) ridingEntity;
Vector3d fromrot = spongeEntity.getRotation();
Location<World> from = spongeEntity.getLocation();
Vector3d torot = new Vector3d(packetIn.getPitch(), packetIn.getYaw(), 0);
Location<World> to = new Location<>(spongeEntity.getWorld(), packetIn.getX(), packetIn.getY(), packetIn.getZ());
Transform<World> fromTransform = spongeEntity.getTransform().setLocation(from).setRotation(fromrot);
Transform<World> toTransform = spongeEntity.getTransform().setLocation(to).setRotation(torot);
MoveEntityEvent event = SpongeEventFactory.createMoveEntityEvent(Sponge.getCauseStackManager().getCurrentCause(), fromTransform, toTransform, this.getPlayer());
SpongeImpl.postEvent(event);
if (event.isCancelled()) {
// There is no need to change the current riding entity position as it hasn't changed yet.
// Send packet to client in order to update rider position.
this.netManager.sendPacket(new SPacketMoveVehicle(ridingEntity));
return this.player;
}
return ridingEntity;
}
@Redirect(method = "onDisconnect", at = @At(value = "INVOKE",
target = "Lnet/minecraft/server/management/PlayerList;sendMessage(Lnet/minecraft/util/text/ITextComponent;)V"))
public void onDisconnectHandler(PlayerList this$0, ITextComponent component) {
// If this happens, the connection has not been fully established yet so we've kicked them during ClientConnectionEvent.Login,
// but FML has created this handler earlier to send their handshake. No message should be sent, no disconnection event should
// be fired either.
if (this.player.connection == null) {
return;
}
final Player player = ((Player) this.player);
final Text message = SpongeTexts.toText(component);
final MessageChannel originalChannel = player.getMessageChannel();
Sponge.getCauseStackManager().pushCause(player);
final ClientConnectionEvent.Disconnect event = SpongeEventFactory.createClientConnectionEventDisconnect(
Sponge.getCauseStackManager().getCurrentCause(), originalChannel, Optional.of(originalChannel), new MessageEvent.MessageFormatter(message),
player, false
);
SpongeImpl.postEvent(event);
Sponge.getCauseStackManager().popCause();
if (!event.isMessageCancelled()) {
event.getChannel().ifPresent(channel -> channel.send(player, event.getMessage()));
}
((IMixinEntityPlayerMP) this.player).getWorldBorderListener().onPlayerDisconnect();
}
@Redirect(method = "processTryUseItemOnBlock", at = @At(value = "INVOKE", target = "Lnet/minecraft/server/management/PlayerInteractionManager;processRightClickBlock(Lnet/minecraft/entity/player/EntityPlayer;Lnet/minecraft/world/World;Lnet/minecraft/item/ItemStack;Lnet/minecraft/util/EnumHand;Lnet/minecraft/util/math/BlockPos;Lnet/minecraft/util/EnumFacing;FFF)Lnet/minecraft/util/EnumActionResult;"))
public EnumActionResult onProcessRightClickBlock(PlayerInteractionManager interactionManager, EntityPlayer player, net.minecraft.world.World worldIn, @Nullable ItemStack stack, EnumHand hand, BlockPos pos, EnumFacing facing, float hitX, float hitY, float hitZ) {
EnumActionResult actionResult = interactionManager.processRightClickBlock(this.player, worldIn, stack, hand, pos, facing, hitX, hitY, hitZ);
// If result is not SUCCESS, we need to avoid throwing an InteractBlockEvent.Secondary for AIR
// since the client will send the server a CPacketTryUseItem right after this packet is done processing.
if (actionResult != EnumActionResult.SUCCESS) {
//SpongeCommonEventFactory.ignoreRightClickAirEvent = true;
// If a plugin or mod has changed the item, avoid restoring
if (!SpongeCommonEventFactory.playerInteractItemChanged) {
final PhaseTracker phaseTracker = PhaseTracker.getInstance();
final PhaseData peek = phaseTracker.getCurrentPhaseData();
final ItemStack itemStack = ItemStackUtil.toNative(((PacketContext<?>) peek.context).getItemUsed());
// Only do a restore if something actually changed. The client does an identity check ('==')
// to determine if it should continue using an itemstack. If we always resend the itemstack, we end up
// cancelling item usage (e.g. eating food) that occurs while targeting a block
if (!ItemStack.areItemStacksEqual(itemStack, player.getHeldItem(hand)) || SpongeCommonEventFactory.interactBlockEventCancelled) {
PacketPhaseUtil.handlePlayerSlotRestore((EntityPlayerMP) player, itemStack, hand);
}
}
}
SpongeCommonEventFactory.playerInteractItemChanged = false;
SpongeCommonEventFactory.interactBlockEventCancelled = false;
return actionResult;
}
@Nullable
@Redirect(method = "processPlayerDigging", at = @At(value = "INVOKE", target = "Lnet/minecraft/entity/player/EntityPlayerMP;dropItem(Z)Lnet/minecraft/entity/item/EntityItem;"))
public EntityItem onPlayerDropItem(EntityPlayerMP player, boolean dropAll) {
EntityItem item = null;
ItemStack stack = this.player.inventory.getCurrentItem();
if (!stack.isEmpty()) {
int size = stack.getCount();
item = this.player.dropItem(dropAll);
// force client itemstack update if drop event was cancelled
if (item == null) {
Slot slot = this.player.openContainer.getSlotFromInventory(this.player.inventory, this.player.inventory.currentItem);
int windowId = this.player.openContainer.windowId;
stack.setCount(size);
this.sendPacket(new SPacketSetSlot(windowId, slot.slotNumber, stack));
}
}
return item;
}
/**
* Attempts to find the {@link DataParameter} that was potentially modified
* when a player interacts with an entity.
*
* @param stack The item the player is holding
* @param entity The entity
* @return A possible data parameter or null if unknown
*/
@Nullable
private static DataParameter<?> findModifiedEntityInteractDataParameter(ItemStack stack, Entity entity) {
Item item = stack.getItem();
if (item == Items.DYE) {
// ItemDye.itemInteractionForEntity
if (entity instanceof EntitySheep) {
return EntitySheep.DYE_COLOR;
}
// EntityWolf.processInteract
if (entity instanceof EntityWolf) {
return EntityWolf.COLLAR_COLOR;
}
return null;
}
if (item == Items.NAME_TAG) {
// ItemNameTag.itemInteractionForEntity
return entity instanceof EntityLivingBase && !(entity instanceof EntityPlayer) && stack.hasDisplayName() ? Entity.CUSTOM_NAME : null;
}
if (item == Items.SADDLE) {
// ItemSaddle.itemInteractionForEntity
return entity instanceof EntityPig ? EntityPig.SADDLED : null;
}
if (item instanceof ItemBlock && ((ItemBlock) item).getBlock() == Blocks.CHEST) {
// AbstractChestHorse.processInteract
return entity instanceof AbstractChestHorse ? AbstractChestHorse.DATA_ID_CHEST : null;
}
return null;
}
/**
* @author blood - April 5th, 2016
*
* @reason Due to all the changes we now do for this packet, it is much easier
* to read it all with an overwrite. Information detailing on why each change
* was made can be found in comments below.
*
* @param packetIn The entity use packet
*/
@Overwrite
public void processUseEntity(CPacketUseEntity packetIn) {
// Sponge start
// All packets received by server are handled first on the Netty Thread
if (!SpongeImpl.getServer().isCallingFromMinecraftThread()) {
if (packetIn.getAction() == CPacketUseEntity.Action.INTERACT) {
// This packet is only sent by client when CPacketUseEntity.Action.INTERACT_AT is
// not successful. We can safely ignore this packet as we handle the INTERACT logic
// when INTERACT_AT does not return a successful result.
return;
} else { // queue packet for main thread
PacketThreadUtil.checkThreadAndEnqueue(packetIn, (NetHandlerPlayServer) (Object) this, this.player.getServerWorld());
return;
}
}
// Sponge end
WorldServer worldserver = this.serverController.getWorld(this.player.dimension);
Entity entity = packetIn.getEntityFromWorld(worldserver);
this.player.markPlayerActive();
if (entity != null) {
boolean flag = this.player.canEntityBeSeen(entity);
double d0 = 36.0D; // 6 blocks
if (!flag) {
d0 = 9.0D; // 1.5 blocks
}
if (this.player.getDistanceSq(entity) < d0) {
// Sponge start - Ignore CPacketUseEntity.Action.INTERACT
/*if (packetIn.getAction() == CPacketUseEntity.Action.INTERACT) {
// The client will only send this packet if INTERACT_AT is not successful.
// We can safely ignore this as we handle interactOn below during INTERACT_AT.
//EnumHand enumhand = packetIn.getHand();
//this.player.interactOn(entity, enumhand);
} else */
// Sponge end
if (packetIn.getAction() == CPacketUseEntity.Action.INTERACT_AT) {
// Sponge start - Fire interact events
EnumHand hand = packetIn.getHand();
ItemStack itemstack = hand != null ? this.player.getHeldItem(hand) : ItemStack.EMPTY;
Sponge.getCauseStackManager().addContext(EventContextKeys.USED_ITEM, ItemStackUtil.snapshotOf(itemstack));
SpongeCommonEventFactory.lastSecondaryPacketTick = this.serverController.getTickCounter();
// Is interaction allowed with item in hand
if (SpongeCommonEventFactory.callInteractItemEventSecondary(this.player, itemstack, hand, VecHelper.toVector3d(packetIn
.getHitVec()), entity).isCancelled() || SpongeCommonEventFactory.callInteractEntityEventSecondary(this.player,
entity, hand, VecHelper.toVector3d(packetIn.getHitVec())).isCancelled()) {
// Restore held item in hand
int index = ((IMixinInventoryPlayer) this.player.inventory).getHeldItemIndex(hand);
Slot slot = this.player.openContainer.getSlotFromInventory(this.player.inventory, index);
sendPacket(new SPacketSetSlot(this.player.openContainer.windowId, slot.slotNumber, itemstack));
// Handle a few special cases where the client assumes that the interaction is successful,
// which means that we need to force an update
if (itemstack.getItem() == Items.LEAD) {
// Detach entity again
sendPacket(new SPacketEntityAttach(entity, null));
} else {
// Other cases may involve a specific DataParameter of the entity
// We fix the client state by marking it as dirty so it will be updated on the client the next tick
DataParameter<?> parameter = findModifiedEntityInteractDataParameter(itemstack, entity);
if (parameter != null) {
entity.getDataManager().setDirty(parameter);
}
}
return;
}
// If INTERACT_AT is not successful, run the INTERACT logic
if (entity.applyPlayerInteraction(this.player, packetIn.getHitVec(), hand) != EnumActionResult.SUCCESS) {
this.player.interactOn(entity, hand);
}
// Sponge end
} else if (packetIn.getAction() == CPacketUseEntity.Action.ATTACK) {
// Sponge start - Call interact event
EnumHand hand = EnumHand.MAIN_HAND; // Will be null in the packet during ATTACK
ItemStack itemstack = this.player.getHeldItem(hand);
SpongeCommonEventFactory.lastPrimaryPacketTick = this.serverController.getTickCounter();
Vector3d hitVec = null;
if (packetIn.getHitVec() == null) {
final RayTraceResult result = SpongeImplHooks.rayTraceEyes(player, SpongeImplHooks.getBlockReachDistance(player));
hitVec = result == null ? null : VecHelper.toVector3d(result.hitVec);
}
if (SpongeCommonEventFactory.callInteractItemEventPrimary(this.player, itemstack, hand, hitVec, entity).isCancelled()) {
((IMixinEntityPlayerMP) this.player).restorePacketItem(hand);
return;
}
// Sponge end
if (entity instanceof EntityItem || entity instanceof EntityXPOrb || entity instanceof EntityArrow || entity == this.player) {
this.disconnect(new TextComponentTranslation("multiplayer.disconnect.invalid_entity_attacked"));
this.serverController.logWarning("Player " + this.player.getName() + " tried to attack an invalid entity");
return;
}
// Sponge start
if (entity instanceof Player && !((World) this.player.world).getProperties().isPVPEnabled()) {
return; // PVP is disabled, ignore
}
if (SpongeCommonEventFactory.callInteractEntityEventPrimary(this.player, entity, hand, hitVec).isCancelled()) {
((IMixinEntityPlayerMP) this.player).restorePacketItem(hand);
return;
}
// Sponge end
this.player.attackTargetEntityWithCurrentItem(entity);
}
}
}
}
/**
 * Records the location of the player's last processed movement packet.
 * Stored so that other handlers (e.g. resource-pack prompt detection) can
 * observe whether the player has moved since a given event.
 *
 * @param location the world location from the most recent move packet
 */
@Override
public void setLastMoveLocation(Location<World> location) {
this.lastMoveLocation = location;
}
/**
 * Mixin injection at the HEAD of the vanilla resource-pack-status handler.
 * Re-dispatches the packet onto the main server thread so that the cause
 * tracker (see MixinPacketThreadUtil) observes it in the correct context.
 *
 * @param packet the client's resource pack status packet
 * @param ci     mixin callback info (unused; injection does not cancel)
 */
@Inject(method = "handleResourcePackStatus(Lnet/minecraft/network/play/client/CPacketResourcePackStatus;)V", at = @At("HEAD"))
private void onProcessResourcePackStatus(CPacketResourcePackStatus packet, CallbackInfo ci) {
// Propagate the packet to the main thread so the cause tracker picks
// it up. See MixinPacketThreadUtil.
PacketThreadUtil.checkThreadAndEnqueue(packet, (INetHandlerPlayServer) this, this.player.getServerWorld());
}
/**
 * Re-sends the most recent pending resource pack request, if any.
 *
 * <p>The vanilla client sends no resource pack status at all when the user
 * dismisses the prompt with Escape; once the player moves again the prompt
 * must be gone, so the request is re-sent to obtain a real answer.
 */
@Override
public void resendLatestResourcePackRequest() {
    if (this.resourcePackRequests.isEmpty()) {
        return; // nothing pending, nothing to re-send
    }
    this.sendPacket(this.resourcePackRequests.peek());
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.update;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRunnable;
import org.elasticsearch.action.RoutingMissingException;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.delete.TransportDeleteAction;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.index.TransportIndexAction;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.AutoCreateIndex;
import org.elasticsearch.action.support.TransportActions;
import org.elasticsearch.action.support.single.instance.TransportInstanceSingleOperationAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.routing.PlainShardIterator;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.engine.DocumentAlreadyExistsException;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndexAlreadyExistsException;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import java.util.Collections;
import java.util.Map;
/**
*/
/**
 * Transport action backing the update API.
 *
 * <p>Resolves routing on the receiving node, optionally auto-creates the target
 * index, then prepares the update via {@link UpdateHelper} and translates it into
 * an index, delete, or no-op operation executed against the primary shard.
 * Version conflicts are retried up to {@code UpdateRequest#retryOnConflict()} times.
 */
public class TransportUpdateAction extends TransportInstanceSingleOperationAction<UpdateRequest, UpdateResponse> {

    private final TransportDeleteAction deleteAction;
    private final TransportIndexAction indexAction;
    private final AutoCreateIndex autoCreateIndex;
    private final TransportCreateIndexAction createIndexAction;
    private final UpdateHelper updateHelper;
    private final IndicesService indicesService;

    @Inject
    public TransportUpdateAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService,
                                 TransportIndexAction indexAction, TransportDeleteAction deleteAction, TransportCreateIndexAction createIndexAction,
                                 UpdateHelper updateHelper, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                                 IndicesService indicesService, AutoCreateIndex autoCreateIndex) {
        super(settings, UpdateAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, UpdateRequest::new);
        this.indexAction = indexAction;
        this.deleteAction = deleteAction;
        this.createIndexAction = createIndexAction;
        this.updateHelper = updateHelper;
        this.indicesService = indicesService;
        this.autoCreateIndex = autoCreateIndex;
    }

    @Override
    protected String executor() {
        // Updates read-then-write the document, so run on the index thread pool.
        return ThreadPool.Names.INDEX;
    }

    @Override
    protected UpdateResponse newResponse() {
        return new UpdateResponse();
    }

    @Override
    protected boolean retryOnFailure(Throwable e) {
        return TransportActions.isShardNotAvailableException(e);
    }

    @Override
    protected boolean resolveRequest(ClusterState state, UpdateRequest request, ActionListener<UpdateResponse> listener) {
        request.routing((state.metaData().resolveIndexRouting(request.routing(), request.index())));
        // Fail fast on the node that received the request, rather than failing when translating on the index or delete request.
        if (request.routing() == null && state.getMetaData().routingRequired(request.concreteIndex(), request.type())) {
            throw new RoutingMissingException(request.concreteIndex(), request.type(), request.id());
        }
        return true;
    }

    @Override
    protected void doExecute(final UpdateRequest request, final ActionListener<UpdateResponse> listener) {
        // if we don't have a master, we don't have metadata, that's fine, let it find a master using create index API
        if (autoCreateIndex.shouldAutoCreate(request.index(), clusterService.state())) {
            createIndexAction.execute(new CreateIndexRequest(request).index(request.index()).cause("auto(update api)").masterNodeTimeout(request.timeout()), new ActionListener<CreateIndexResponse>() {
                @Override
                public void onResponse(CreateIndexResponse result) {
                    innerExecute(request, listener);
                }

                @Override
                public void onFailure(Throwable e) {
                    if (ExceptionsHelper.unwrapCause(e) instanceof IndexAlreadyExistsException) {
                        // we have the index, do it
                        try {
                            innerExecute(request, listener);
                        } catch (Throwable e1) {
                            listener.onFailure(e1);
                        }
                    } else {
                        listener.onFailure(e);
                    }
                }
            });
        } else {
            innerExecute(request, listener);
        }
    }

    private void innerExecute(final UpdateRequest request, final ActionListener<UpdateResponse> listener) {
        super.doExecute(request, listener);
    }

    @Override
    protected ShardIterator shards(ClusterState clusterState, UpdateRequest request) {
        if (request.shardId() != -1) {
            return clusterState.routingTable().index(request.concreteIndex()).shard(request.shardId()).primaryShardIt();
        }
        ShardIterator shardIterator = clusterService.operationRouting()
                .indexShards(clusterState, request.concreteIndex(), request.type(), request.id(), request.routing());
        ShardRouting shard;
        // Updates must run on the primary; scan the iterator for it.
        while ((shard = shardIterator.nextOrNull()) != null) {
            if (shard.primary()) {
                return new PlainShardIterator(shardIterator.shardId(), Collections.singletonList(shard));
            }
        }
        return new PlainShardIterator(shardIterator.shardId(), Collections.<ShardRouting>emptyList());
    }

    @Override
    protected void shardOperation(final UpdateRequest request, final ActionListener<UpdateResponse> listener) {
        shardOperation(request, listener, 0);
    }

    /**
     * Prepares the update against the local primary shard and dispatches the
     * resulting index/delete/no-op operation. On version conflict, re-runs the
     * whole prepare-and-execute cycle until {@code request.retryOnConflict()}
     * attempts are exhausted.
     *
     * @param retryCount number of conflict retries already performed
     */
    protected void shardOperation(final UpdateRequest request, final ActionListener<UpdateResponse> listener, final int retryCount) {
        IndexService indexService = indicesService.indexServiceSafe(request.concreteIndex());
        IndexShard indexShard = indexService.shardSafe(request.shardId());
        final UpdateHelper.Result result = updateHelper.prepare(request, indexShard);
        switch (result.operation()) {
            case UPSERT:
                IndexRequest upsertRequest = new IndexRequest((IndexRequest)result.action(), request);
                // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request
                final BytesReference upsertSourceBytes = upsertRequest.source();
                indexAction.execute(upsertRequest, new ActionListener<IndexResponse>() {
                    @Override
                    public void onResponse(IndexResponse response) {
                        UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getIndex(), response.getType(), response.getId(), response.getVersion(), response.isCreated());
                        if (request.fields() != null && request.fields().length > 0) {
                            Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(upsertSourceBytes, true);
                            update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes));
                        } else {
                            update.setGetResult(null);
                        }
                        listener.onResponse(update);
                    }

                    @Override
                    public void onFailure(Throwable e) {
                        e = ExceptionsHelper.unwrapCause(e);
                        // An upsert races document creation, so "already exists" is also retried.
                        if (e instanceof VersionConflictEngineException || e instanceof DocumentAlreadyExistsException) {
                            if (retryCount < request.retryOnConflict()) {
                                threadPool.executor(executor()).execute(new ActionRunnable<UpdateResponse>(listener) {
                                    @Override
                                    protected void doRun() {
                                        shardOperation(request, listener, retryCount + 1);
                                    }
                                });
                                return;
                            }
                        }
                        listener.onFailure(e);
                    }
                });
                break;
            case INDEX:
                IndexRequest indexRequest = new IndexRequest((IndexRequest)result.action(), request);
                // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request
                final BytesReference indexSourceBytes = indexRequest.source();
                indexAction.execute(indexRequest, new ActionListener<IndexResponse>() {
                    @Override
                    public void onResponse(IndexResponse response) {
                        UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getIndex(), response.getType(), response.getId(), response.getVersion(), response.isCreated());
                        update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), indexSourceBytes));
                        listener.onResponse(update);
                    }

                    @Override
                    public void onFailure(Throwable e) {
                        e = ExceptionsHelper.unwrapCause(e);
                        if (e instanceof VersionConflictEngineException) {
                            if (retryCount < request.retryOnConflict()) {
                                threadPool.executor(executor()).execute(new ActionRunnable<UpdateResponse>(listener) {
                                    @Override
                                    protected void doRun() {
                                        shardOperation(request, listener, retryCount + 1);
                                    }
                                });
                                return;
                            }
                        }
                        listener.onFailure(e);
                    }
                });
                break;
            case DELETE:
                DeleteRequest deleteRequest = new DeleteRequest((DeleteRequest)result.action(), request);
                deleteAction.execute(deleteRequest, new ActionListener<DeleteResponse>() {
                    @Override
                    public void onResponse(DeleteResponse response) {
                        UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getIndex(), response.getType(), response.getId(), response.getVersion(), false);
                        update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), null));
                        listener.onResponse(update);
                    }

                    @Override
                    public void onFailure(Throwable e) {
                        e = ExceptionsHelper.unwrapCause(e);
                        if (e instanceof VersionConflictEngineException) {
                            if (retryCount < request.retryOnConflict()) {
                                threadPool.executor(executor()).execute(new ActionRunnable<UpdateResponse>(listener) {
                                    @Override
                                    protected void doRun() {
                                        shardOperation(request, listener, retryCount + 1);
                                    }
                                });
                                return;
                            }
                        }
                        listener.onFailure(e);
                    }
                });
                break;
            case NONE:
                UpdateResponse update = result.action();
                IndexService indexServiceOrNull = indicesService.indexService(request.concreteIndex());
                if (indexServiceOrNull != null) {
                    // FIX: use the null-checked service here; the original called
                    // indexService.shard(...) (from indexServiceSafe above), which made
                    // this null check ineffective.
                    IndexShard shard = indexServiceOrNull.shard(request.shardId());
                    if (shard != null) {
                        shard.indexingService().noopUpdate(request.type());
                    }
                }
                listener.onResponse(update);
                break;
            default:
                throw new IllegalStateException("Illegal operation " + result.operation());
        }
    }
}
| |
package org.altbeacon.beacon;
import android.os.Parcel;
import static android.test.MoreAsserts.assertNotEqual;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertTrue;
import org.robolectric.RobolectricTestRunner;
import org.junit.runner.RunWith;
import org.junit.Test;
import org.robolectric.annotation.Config;
import java.util.Arrays;
import java.util.UUID;
@Config(sdk = 28)
@RunWith(RobolectricTestRunner.class)
/*
HOW TO SEE DEBUG LINES FROM YOUR UNIT TESTS:
1. set a line like this at the start of your test:
org.robolectric.shadows.ShadowLog.stream = System.err;
2. run the tests from the command line
3. Look at the test report file in your web browser, e.g.
file:///Users/dyoung/workspace/AndroidProximityLibrary/build/reports/tests/index.html
4. Expand the System.err section
*/
public class IdentifierTest {
    @Test
    public void testEqualsNormalizationIgnoresCase() {
        Identifier identifier1 = Identifier.parse("2f234454-cf6d-4a0f-adf2-f4911ba9ffa6");
        Identifier identifier2 = Identifier.parse("2F234454-CF6D-4A0F-ADF2-F4911BA9FFA6");
        assertTrue("Identifiers of different case should match", identifier1.equals(identifier2));
    }

    @Test
    public void testToStringNormalizesCase() {
        Identifier identifier1 = Identifier.parse("2F234454-CF6D-4A0F-ADF2-F4911BA9FFA6");
        assertEquals("Identifiers of different case should match", "2f234454-cf6d-4a0f-adf2-f4911ba9ffa6", identifier1.toString());
    }

    @Test
    public void testToStringEqualsUuid() {
        Identifier identifier1 = Identifier.parse("2F234454-CF6D-4A0F-ADF2-F4911BA9FFA6");
        assertEquals("uuidString of Identifier should match", "2f234454-cf6d-4a0f-adf2-f4911ba9ffa6", identifier1.toUuidString());
    }

    @Test
    public void testToUuidEqualsToUuidString() {
        Identifier identifier1 = Identifier.parse("2F234454-CF6D-4A0F-ADF2-F4911BA9FFA6");
        assertEquals("uuidString of Identifier should match", identifier1.toUuid().toString(), identifier1.toUuidString());
    }

    @Test
    public void testToByteArrayConvertsUuids() {
        Identifier identifier1 = Identifier.parse("2F234454-CF6D-4A0F-ADF2-F4911BA9FFA6");
        byte[] bytes = identifier1.toByteArrayOfSpecifiedEndianness(true);
        assertEquals("byte array is correct length", bytes.length, 16);
        assertEquals("first byte of uuid converted properly", 0x2f, bytes[0] & 0xFF);
        assertEquals("second byte of uuid converted properly", 0x23, bytes[1] & 0xFF);
        assertEquals("last byte of uuid converted properly", 0xa6, bytes[15] & 0xFF);
    }

    @Test
    public void testToByteArrayConvertsUuidsAsLittleEndian() {
        Identifier identifier1 = Identifier.parse("2F234454-CF6D-4A0F-ADF2-F4911BA9FFA6");
        byte[] bytes = identifier1.toByteArrayOfSpecifiedEndianness(false);
        assertEquals("byte array is correct length", bytes.length, 16);
        assertEquals("first byte of uuid converted properly", 0xa6, bytes[0] & 0xFF);
        assertEquals("last byte of uuid converted properly", 0x2f, bytes[15] & 0xFF);
    }

    @Test
    public void testToByteArrayConvertsHex() {
        Identifier identifier1 = Identifier.parse("0x010203040506");
        byte[] bytes = identifier1.toByteArrayOfSpecifiedEndianness(true);
        assertEquals("byte array is correct length", bytes.length, 6);
        assertEquals("first byte of hex is converted properly", 0x01, bytes[0] & 0xFF);
        assertEquals("last byte of hex is converted properly", 0x06, bytes[5] & 0xFF);
    }

    @Test
    public void testToByteArrayConvertsDecimal() {
        Identifier identifier1 = Identifier.parse("65534");
        byte[] bytes = identifier1.toByteArrayOfSpecifiedEndianness(true);
        assertEquals("byte array is correct length", bytes.length, 2);
        assertEquals("reported byte array is correct length", identifier1.getByteCount(), 2);
        assertEquals("first byte of decimal converted properly", 0xff, bytes[0] & 0xFF);
        assertEquals("last byte of decimal converted properly", 0xfe, bytes[1] & 0xFF);
    }

    @Test
    public void testToByteArrayConvertsInt() {
        Identifier identifier1 = Identifier.fromInt(65534);
        byte[] bytes = identifier1.toByteArrayOfSpecifiedEndianness(true);
        assertEquals("byte array is correct length", bytes.length, 2);
        assertEquals("reported byte array is correct length", identifier1.getByteCount(), 2);
        assertEquals("conversion back equals original value", identifier1.toInt(), 65534);
        assertEquals("first byte of decimal converted properly", 0xff, bytes[0] & 0xFF);
        assertEquals("last byte of decimal converted properly", 0xfe, bytes[1] & 0xFF);
    }

    @Test
    public void testToByteArrayFromByteArray() {
        byte[] value = new byte[] {(byte) 0xFF, (byte) 0xAB, 0x12, 0x25};
        Identifier identifier1 = Identifier.fromBytes(value, 0, value.length, false);
        byte[] bytes = identifier1.toByteArrayOfSpecifiedEndianness(true);
        assertEquals("byte array is correct length", bytes.length, 4);
        assertEquals("correct string representation", identifier1.toString(), "0xffab1225");
        assertTrue("arrays equal", Arrays.equals(value, bytes));
        assertNotSame("arrays are copied", bytes, value);
    }

    @Test
    public void testComparableDifferentLength() {
        byte[] value1 = new byte[] {(byte) 0xFF, (byte) 0xAB, 0x12, 0x25};
        Identifier identifier1 = Identifier.fromBytes(value1, 0, value1.length, false);
        byte[] value2 = new byte[] {(byte) 0xFF, (byte) 0xAB, 0x12, 0x25, 0x11, 0x11};
        Identifier identifier2 = Identifier.fromBytes(value2, 0, value2.length, false);
        assertEquals("identifier1 is smaller than identifier2", identifier1.compareTo(identifier2), -1);
        assertEquals("identifier2 is larger than identifier1", identifier2.compareTo(identifier1), 1);
    }

    @Test
    public void testComparableSameLength() {
        byte[] value1 = new byte[] {(byte) 0xFF, (byte) 0xAB, 0x12, 0x25, 0x22, 0x25};
        Identifier identifier1 = Identifier.fromBytes(value1, 0, value1.length, false);
        byte[] value2 = new byte[] {(byte) 0xFF, (byte) 0xAB, 0x12, 0x25, 0x11, 0x11};
        Identifier identifier2 = Identifier.fromBytes(value2, 0, value2.length, false);
        // FIX: the original asserted identifier1.compareTo(identifier1) with a message
        // claiming it compared identifier1 to identifier2 — it only tested reflexivity.
        // Compare against a separately constructed identifier with identical bytes instead.
        Identifier identifier1Copy = Identifier.fromBytes(value1, 0, value1.length, false);
        assertEquals("identifier1 is equal to an identically-valued identifier", identifier1.compareTo(identifier1Copy), 0);
        assertEquals("identifier1 is larger than identifier2", identifier1.compareTo(identifier2), 1);
        assertEquals("identifier2 is smaller than identifier1", identifier2.compareTo(identifier1), -1);
    }

    @Test
    public void testParseIntegerMaxInclusive() {
        Identifier.parse("65535");
    }

    @Test(expected = IllegalArgumentException.class)
    public void testParseIntegerAboveMax() {
        Identifier.parse("65536");
    }

    @Test
    public void testParseIntegerMinInclusive() {
        Identifier.parse("0");
    }

    @Test(expected = IllegalArgumentException.class)
    public void testParseIntegerBelowMin() {
        Identifier.parse("-1");
    }

    @Test(expected = IllegalArgumentException.class)
    public void testParseIntegerWayTooBig() {
        Identifier.parse("3133742");
    }

    /*
     * This is here because Identifier.parse wrongly accepts UUIDs without
     * dashes, but we want to be backward compatible.
     */
    @Test
    public void testParseInvalidUuid() {
        UUID ref = UUID.fromString("2f234454-cf6d-4a0f-adf2-f4911ba9ffa6");
        Identifier id = Identifier.parse("2f234454cf6d4a0fadf2f4911ba9ffa6");
        assertEquals("Malformed UUID was parsed as expected.", id.toUuid(), ref);
    }

    @Test
    public void testParseHexWithNoPrefix() {
        Identifier id = Identifier.parse("abcd");
        assertEquals("Should parse and get back equivalent decimal value for small numbers", "43981", id.toString());
    }

    @Test
    public void testParseBigHexWithNoPrefix() {
        Identifier id = Identifier.parse("123456789abcdef");
        assertEquals("Should parse and get prefixed hex value for big numbers", "0x0123456789abcdef", id.toString());
    }

    @Test
    public void testParseZeroPrefixedDecimalNumberAsHex() {
        Identifier id = Identifier.parse("0010");
        assertEquals("Should be treated as hex in parse, but converted back to decimal because it is small", "16", id.toString());
    }

    @Test
    public void testParseNonZeroPrefixedDecimalNumberAsDecimal() {
        Identifier id = Identifier.parse("10");
        assertEquals("Should be treated as decimal", "10", id.toString());
    }

    @Test
    public void testParseDecimalNumberWithSpecifiedLength() {
        Identifier id = Identifier.parse("10", 8);
        assertEquals("Should be treated as hex because it is long", "0x000000000000000a", id.toString());
        assertEquals("Byte count should be as specified", 8, id.getByteCount());
    }

    @Test
    public void testParseDecimalNumberWithSpecifiedShortLength() {
        Identifier id = Identifier.parse("10", 2);
        assertEquals("Should be treated as decimal because it is short", "10", id.toString());
        assertEquals("Byte count should be as specified", 2, id.getByteCount());
    }

    @Test
    public void testParseHexNumberWithSpecifiedLength() {
        Identifier id = Identifier.parse("2fffffffffffffffffff", 10);
        assertEquals("Should be treated as hex because it is long", "0x2fffffffffffffffffff", id.toString());
        assertEquals("Byte count should be as specified", 10, id.getByteCount());
    }

    @Test
    public void testParseZeroAsInteger() {
        Identifier id = Identifier.parse("0");
        assertEquals("Should be treated as int because it is a common integer", "0", id.toString());
        assertEquals("Byte count should be 2 for integers", 2, id.getByteCount());
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.metrics.datadog;
import org.apache.flink.metrics.Counter;
import org.apache.flink.metrics.Gauge;
import org.apache.flink.metrics.Histogram;
import org.apache.flink.metrics.Meter;
import org.apache.flink.metrics.Metric;
import org.apache.flink.metrics.MetricConfig;
import org.apache.flink.metrics.MetricGroup;
import org.apache.flink.metrics.reporter.MetricReporter;
import org.apache.flink.metrics.reporter.Scheduled;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.SocketTimeoutException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* Metric Reporter for Datadog.
*
* <p>Variables in metrics scope will be sent to Datadog as tags.
*/
public class DatadogHttpReporter implements MetricReporter, Scheduled {
    private static final Logger LOGGER = LoggerFactory.getLogger(DatadogHttpReporter.class);

    private static final String HOST_VARIABLE = "<host>";

    // Both Flink's Gauge and Meter values are taken as gauge in Datadog
    private final Map<Gauge, DGauge> gauges = new ConcurrentHashMap<>();
    private final Map<Counter, DCounter> counters = new ConcurrentHashMap<>();
    private final Map<Meter, DMeter> meters = new ConcurrentHashMap<>();

    private DatadogHttpClient client;
    private List<String> configTags;

    public static final String API_KEY = "apikey";
    public static final String PROXY_HOST = "proxyHost";
    public static final String PROXY_PORT = "proxyPort";
    public static final String TAGS = "tags";

    /**
     * Registers a newly added metric, wrapping it in its Datadog representation
     * tagged with the configured tags plus the metric group's scope variables.
     */
    @Override
    public void notifyOfAddedMetric(Metric metric, String metricName, MetricGroup group) {
        final String name = group.getMetricIdentifier(metricName);

        List<String> tags = new ArrayList<>(configTags);
        tags.addAll(getTagsFromMetricGroup(group));
        String host = getHostFromMetricGroup(group);

        if (metric instanceof Counter) {
            Counter c = (Counter) metric;
            counters.put(c, new DCounter(c, name, host, tags));
        } else if (metric instanceof Gauge) {
            Gauge g = (Gauge) metric;
            gauges.put(g, new DGauge(g, name, host, tags));
        } else if (metric instanceof Meter) {
            Meter m = (Meter) metric;
            // Only consider rate
            meters.put(m, new DMeter(m, name, host, tags));
        } else if (metric instanceof Histogram) {
            LOGGER.warn("Cannot add {} because Datadog HTTP API doesn't support Histogram", metricName);
        } else {
            LOGGER.warn("Cannot add unknown metric type {}. This indicates that the reporter " +
                "does not support this metric type.", metric.getClass().getName());
        }
    }

    /** Drops a removed metric from the corresponding registry map. */
    @Override
    public void notifyOfRemovedMetric(Metric metric, String metricName, MetricGroup group) {
        if (metric instanceof Counter) {
            counters.remove(metric);
        } else if (metric instanceof Gauge) {
            gauges.remove(metric);
        } else if (metric instanceof Meter) {
            meters.remove(metric);
        } else if (metric instanceof Histogram) {
            // No Histogram is registered
        } else {
            LOGGER.warn("Cannot remove unknown metric type {}. This indicates that the reporter " +
                "does not support this metric type.", metric.getClass().getName());
        }
    }

    /**
     * Initializes the HTTP client from the reporter config.
     * Recognized keys: {@link #API_KEY}, {@link #PROXY_HOST}, {@link #PROXY_PORT}, {@link #TAGS}.
     */
    @Override
    public void open(MetricConfig config) {
        String apiKey = config.getString(API_KEY, null);
        String proxyHost = config.getString(PROXY_HOST, null);
        Integer proxyPort = config.getInteger(PROXY_PORT, 8080);
        client = new DatadogHttpClient(apiKey, proxyHost, proxyPort);
        LOGGER.info("Configured DatadogHttpReporter");

        configTags = getTagsFromConfig(config.getString(TAGS, ""));
    }

    @Override
    public void close() {
        client.close();
        LOGGER.info("Shut down DatadogHttpReporter");
    }

    /**
     * Collects all registered metrics into one batched request and sends it to Datadog.
     * Gauges whose value is not a Number (or that throw) are dropped from future reports.
     */
    @Override
    public void report() {
        DatadogHttpRequest request = new DatadogHttpRequest();

        List<Gauge> gaugesToRemove = new ArrayList<>();
        for (Map.Entry<Gauge, DGauge> entry : gauges.entrySet()) {
            DGauge g = entry.getValue();
            try {
                // Will throw exception if the Gauge is not of Number type
                // Flink uses Gauge to store many types other than Number
                g.getMetricValue();
                request.addGauge(g);
            } catch (ClassCastException e) {
                LOGGER.info("The metric {} will not be reported because only number types are supported by this reporter.", g.getMetric());
                gaugesToRemove.add(entry.getKey());
            } catch (Exception e) {
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("The metric {} will not be reported because it threw an exception.", g.getMetric(), e);
                } else {
                    LOGGER.info("The metric {} will not be reported because it threw an exception.", g.getMetric());
                }
                gaugesToRemove.add(entry.getKey());
            }
        }
        gaugesToRemove.forEach(gauges::remove);

        for (DCounter c : counters.values()) {
            request.addCounter(c);
        }

        for (DMeter m : meters.values()) {
            request.addMeter(m);
        }

        try {
            client.send(request);
            LOGGER.debug("Reported series with size {}.", request.getSeries().getSeries().size());
        } catch (SocketTimeoutException e) {
            // FIX: the original passed e.getMessage() without a '{}' placeholder,
            // so the timeout message was silently dropped from the log line.
            LOGGER.warn("Failed reporting metrics to Datadog because of socket timeout: {}", e.getMessage());
        } catch (Exception e) {
            LOGGER.warn("Failed reporting metrics to Datadog.", e);
        }
    }

    /**
     * Get config tags from config 'metrics.reporter.dghttp.tags'.
     *
     * <p>Empty entries are skipped: {@code "".split(",")} yields {@code [""]}, so the
     * original implementation produced a single empty tag when no tags were configured.
     */
    private List<String> getTagsFromConfig(String str) {
        List<String> tags = new ArrayList<>();
        for (String tag : str.split(",")) {
            if (!tag.isEmpty()) {
                tags.add(tag);
            }
        }
        return tags;
    }

    /**
     * Get tags from MetricGroup#getAllVariables(), excluding 'host'.
     */
    private List<String> getTagsFromMetricGroup(MetricGroup metricGroup) {
        List<String> tags = new ArrayList<>();

        for (Map.Entry<String, String> entry: metricGroup.getAllVariables().entrySet()) {
            if (!entry.getKey().equals(HOST_VARIABLE)) {
                tags.add(getVariableName(entry.getKey()) + ":" + entry.getValue());
            }
        }

        return tags;
    }

    private String getHostFromMetricGroup(MetricGroup metricGroup) {
        return metricGroup.getAllVariables().get(HOST_VARIABLE);
    }

    /**
     * Removes leading and trailing angle brackets.
     */
    private String getVariableName(String str) {
        return str.substring(1, str.length() - 1);
    }

    /**
     * Compact metrics in batch, serialize them, and send to Datadog via HTTP.
     */
    static class DatadogHttpRequest {
        private final DSeries series;

        public DatadogHttpRequest() {
            series = new DSeries();
        }

        public void addGauge(DGauge gauge) {
            series.addMetric(gauge);
        }

        public void addCounter(DCounter counter) {
            series.addMetric(counter);
        }

        public void addMeter(DMeter meter) {
            series.addMetric(meter);
        }

        public DSeries getSeries() {
            return series;
        }
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.plugins;
import com.intellij.diagnostic.PluginException;
import com.intellij.ide.ClassUtilCore;
import com.intellij.ide.IdeBundle;
import com.intellij.idea.IdeaApplication;
import com.intellij.idea.Main;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationListener;
import com.intellij.notification.NotificationType;
import com.intellij.notification.Notifications;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ApplicationNamesInfo;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.PluginId;
import com.intellij.openapi.extensions.impl.PicoPluginExtensionInitializationException;
import com.intellij.openapi.options.ShowSettingsUtil;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.ex.WindowManagerEx;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.HyperlinkEvent;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.reflect.Method;
import java.util.List;
/**
* @author mike
*/
public class PluginManager extends PluginManagerCore {
@NonNls public static final String INSTALLED_TXT = "installed.txt";
public static long startupStart;
/**
 * Bootstrap entry point, invoked via reflection by the platform launcher.
 *
 * <p>Parses command-line flags, initializes the default look-and-feel for
 * non-headless runs, then starts the real application entry point
 * ({@code mainClass#methodName}) on a dedicated thread whose thread group
 * routes uncaught exceptions through {@link #processException(Throwable)}.
 */
@SuppressWarnings({"UnusedDeclaration", "HardCodedStringLiteral"})
protected static void start(final String mainClass, final String methodName, final String[] args) {
    startupStart = System.nanoTime();
    Main.setFlags(args);
    if (!Main.isHeadless()) {
        UIUtil.initDefaultLAF();
    }

    // Uncaught exceptions from any thread in this group abort startup cleanly.
    ThreadGroup ideaThreadGroup = new ThreadGroup("Idea Thread Group") {
        @Override
        public void uncaughtException(Thread t, Throwable e) {
            processException(e);
        }
    };

    Runnable bootstrap = () -> {
        try {
            ClassUtilCore.clearJarURLCache();
            Class<?> appClass = Class.forName(mainClass);
            Method entryPoint = appClass.getDeclaredMethod(methodName, ArrayUtil.EMPTY_STRING_ARRAY.getClass());
            entryPoint.setAccessible(true);
            entryPoint.invoke(null, new Object[]{args});
        }
        catch (Throwable t) {
            throw new StartupAbortedException(t);
        }
    };

    new Thread(ideaThreadGroup, bootstrap, "Idea Main Thread").start();
}
/**
* @return file with list of once installed plugins if it exists, null otherwise
*/
@Nullable
public static File getOnceInstalledIfExists() {
File onceInstalledFile = new File(PathManager.getConfigPath(), INSTALLED_TXT);
return onceInstalledFile.isFile() ? onceInstalledFile : null;
}
public static void processException(Throwable t) {
if (!IdeaApplication.isLoaded()) {
@SuppressWarnings("ThrowableResultOfMethodCallIgnored") StartupAbortedException se = findCause(t, StartupAbortedException.class);
if (se == null) se = new StartupAbortedException(t);
@SuppressWarnings("ThrowableResultOfMethodCallIgnored") PluginException pe = findCause(t, PluginException.class);
PluginId pluginId = pe != null ? pe.getPluginId() : null;
if (Logger.isInitialized() && !(t instanceof ProcessCanceledException)) {
try {
getLogger().error(t);
}
catch (Throwable ignore) { }
}
if (pluginId != null && !CORE_PLUGIN_ID.equals(pluginId.getIdString())) {
disablePlugin(pluginId.getIdString());
StringWriter message = new StringWriter();
message.append("Plugin '").append(pluginId.getIdString()).append("' failed to initialize and will be disabled. ");
message.append(" Please restart ").append(ApplicationNamesInfo.getInstance().getFullProductName()).append('.');
message.append("\n\n");
pe.getCause().printStackTrace(new PrintWriter(message));
Main.showMessage("Plugin Error", message.toString(), false);
System.exit(Main.PLUGIN_ERROR);
}
else {
Main.showMessage("Start Failed", t);
System.exit(se.exitCode());
}
}
else if (!(t instanceof ProcessCanceledException)) {
getLogger().error(t);
}
}
private static <T extends Throwable> T findCause(Throwable t, Class<T> clazz) {
while (t != null) {
if (clazz.isInstance(t)) {
return clazz.cast(t);
}
t = t.getCause();
}
return null;
}
private static Thread.UncaughtExceptionHandler HANDLER = (t, e) -> processException(e);
public static void installExceptionHandler() {
Thread.currentThread().setUncaughtExceptionHandler(HANDLER);
}
public static void reportPluginError() {
if (myPluginError != null) {
String title = IdeBundle.message("title.plugin.error");
Notifications.Bus.notify(new Notification(title, title, myPluginError, NotificationType.ERROR, new NotificationListener() {
@SuppressWarnings("AssignmentToStaticFieldFromInstanceMethod")
@Override
public void hyperlinkUpdate(@NotNull Notification notification, @NotNull HyperlinkEvent event) {
notification.expire();
String description = event.getDescription();
if (EDIT.equals(description)) {
PluginManagerConfigurable configurable = new PluginManagerConfigurable(PluginManagerUISettings.getInstance());
IdeFrame ideFrame = WindowManagerEx.getInstanceEx().findFrameFor(null);
ShowSettingsUtil.getInstance().editConfigurable((JFrame)ideFrame, configurable);
return;
}
List<String> disabledPlugins = getDisabledPlugins();
if (myPlugins2Disable != null && DISABLE.equals(description)) {
for (String pluginId : myPlugins2Disable) {
if (!disabledPlugins.contains(pluginId)) {
disabledPlugins.add(pluginId);
}
}
}
else if (myPlugins2Enable != null && ENABLE.equals(description)) {
disabledPlugins.removeAll(myPlugins2Enable);
PluginManagerMain.notifyPluginsUpdated(null);
}
try {
saveDisabledPlugins(disabledPlugins, false);
}
catch (IOException ignore) { }
myPlugins2Enable = null;
myPlugins2Disable = null;
}
}));
myPluginError = null;
}
}
public static boolean isPluginInstalled(PluginId id) {
return getPlugin(id) != null;
}
@Nullable
public static IdeaPluginDescriptor getPlugin(@Nullable PluginId id) {
final IdeaPluginDescriptor[] plugins = getPlugins();
for (final IdeaPluginDescriptor plugin : plugins) {
if (Comparing.equal(id, plugin.getPluginId())) {
return plugin;
}
}
return null;
}
public static void handleComponentError(Throwable t, @Nullable String componentClassName, @Nullable PluginId pluginId) {
Application app = ApplicationManager.getApplication();
if (app != null && app.isUnitTestMode()) {
if (t instanceof Error) throw (Error)t;
if (t instanceof RuntimeException) throw (RuntimeException)t;
throw new RuntimeException(t);
}
if (t instanceof StartupAbortedException) {
throw (StartupAbortedException)t;
}
if (pluginId == null || CORE_PLUGIN_ID.equals(pluginId.getIdString())) {
if (componentClassName != null) {
pluginId = getPluginByClassName(componentClassName);
}
}
if (pluginId == null || CORE_PLUGIN_ID.equals(pluginId.getIdString())) {
if (t instanceof PicoPluginExtensionInitializationException) {
pluginId = ((PicoPluginExtensionInitializationException)t).getPluginId();
}
}
if (pluginId != null && !CORE_PLUGIN_ID.equals(pluginId.getIdString())) {
throw new StartupAbortedException(new PluginException(t, pluginId));
}
else {
throw new StartupAbortedException("Fatal error initializing '" + componentClassName + "'", t);
}
}
private static class StartupAbortedException extends RuntimeException {
private int exitCode = Main.STARTUP_EXCEPTION;
public StartupAbortedException(Throwable cause) {
super(cause);
}
public StartupAbortedException(String message, Throwable cause) {
super(message, cause);
}
public int exitCode() {
return exitCode;
}
public StartupAbortedException exitCode(int exitCode) {
this.exitCode = exitCode;
return this;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.entries;
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import org.apache.geode.internal.InternalStatisticsDisabledException;
import org.apache.geode.internal.cache.InternalRegion;
import org.apache.geode.internal.cache.RegionEntryContext;
import org.apache.geode.internal.cache.eviction.EvictionController;
import org.apache.geode.internal.cache.eviction.EvictionNode;
import org.apache.geode.internal.cache.persistence.DiskRecoveryStore;
import org.apache.geode.internal.util.concurrent.CustomEntryConcurrentHashMap.HashEntry;
/*
* macros whose definition changes this class:
*
* disk: DISK lru: LRU stats: STATS versioned: VERSIONED offheap: OFFHEAP
*
* One of the following key macros must be defined:
*
* key object: KEY_OBJECT key int: KEY_INT key long: KEY_LONG key uuid: KEY_UUID key string1:
* KEY_STRING1 key string2: KEY_STRING2
*/
/**
* Do not modify this class. It was generated. Instead modify LeafRegionEntry.cpp and then run
* ./dev-tools/generateRegionEntryClasses.sh (it must be run from the top level directory).
*/
public class VMStatsLRURegionEntryHeapIntKey extends VMStatsLRURegionEntryHeap {
  // --------------------------------------- common fields ----------------------------------------
  // Atomic updater for lastModified: avoids a per-entry AtomicLong object to keep entries small.
  private static final AtomicLongFieldUpdater<VMStatsLRURegionEntryHeapIntKey> LAST_MODIFIED_UPDATER =
      AtomicLongFieldUpdater.newUpdater(VMStatsLRURegionEntryHeapIntKey.class, "lastModified");
  protected int hash;
  // Next entry in the same hash bucket (this entry doubles as a CustomEntryConcurrentHashMap node).
  private HashEntry<Object, Object> nextEntry;
  @SuppressWarnings("unused")
  private volatile long lastModified;  // accessed only through LAST_MODIFIED_UPDATER
  private volatile Object value;
  // --------------------------------------- stats fields -----------------------------------------
  private volatile long lastAccessed;
  private volatile int hitCount;   // updated via HIT_COUNT_UPDATER; read as unsigned in getHitCount()
  private volatile int missCount;  // updated via MISS_COUNT_UPDATER; read as unsigned in getMissCount()
  private static final AtomicIntegerFieldUpdater<VMStatsLRURegionEntryHeapIntKey> HIT_COUNT_UPDATER =
      AtomicIntegerFieldUpdater.newUpdater(VMStatsLRURegionEntryHeapIntKey.class, "hitCount");
  private static final AtomicIntegerFieldUpdater<VMStatsLRURegionEntryHeapIntKey> MISS_COUNT_UPDATER =
      AtomicIntegerFieldUpdater.newUpdater(VMStatsLRURegionEntryHeapIntKey.class, "missCount");
  // --------------------------------------- key fields -------------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // Key stored inline as a primitive int (see getKeyForSizing/isKeyEqual), avoiding a boxed key object.
  private final int key;

  public VMStatsLRURegionEntryHeapIntKey(final RegionEntryContext context, final int key,
      final Object value) {
    super(context, value);
    // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
    this.key = key;
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  protected Object getValueField() {
    return this.value;
  }

  @Override
  protected void setValueField(final Object value) {
    this.value = value;
  }

  @Override
  protected long getLastModifiedField() {
    return LAST_MODIFIED_UPDATER.get(this);
  }

  @Override
  protected boolean compareAndSetLastModifiedField(final long expectedValue, final long newValue) {
    return LAST_MODIFIED_UPDATER.compareAndSet(this, expectedValue, newValue);
  }

  @Override
  public int getEntryHash() {
    return this.hash;
  }

  @Override
  protected void setEntryHash(final int hash) {
    this.hash = hash;
  }

  @Override
  public HashEntry<Object, Object> getNextEntry() {
    return this.nextEntry;
  }

  @Override
  public void setNextEntry(final HashEntry<Object, Object> nextEntry) {
    this.nextEntry = nextEntry;
  }

  // --------------------------------------- eviction code ----------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public void setDelayedDiskId(final DiskRecoveryStore diskRecoveryStore) {
    // nothing needed for LRUs with no disk
  }

  @Override
  public synchronized int updateEntrySize(final EvictionController evictionController) {
    // OFFHEAP: getValue ok w/o incing refcount because we are synced and only getting the size
    return updateEntrySize(evictionController, getValue());
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // Recomputes this entry's size and returns the delta against the previous size.
  @Override
  public synchronized int updateEntrySize(final EvictionController evictionController,
      final Object value) {
    int oldSize = getEntrySize();
    int newSize = evictionController.entrySize(getKeyForSizing(), value);
    setEntrySize(newSize);
    int delta = newSize - oldSize;
    return delta;
  }

  @Override
  public boolean isRecentlyUsed() {
    return areAnyBitsSet(RECENTLY_USED);
  }

  @Override
  public void setRecentlyUsed(RegionEntryContext context) {
    if (!isRecentlyUsed()) {
      setBits(RECENTLY_USED);
      context.incRecentlyUsed();
    }
  }

  @Override
  public void unsetRecentlyUsed() {
    // NOTE(review): clearBits receives the complement of the flag, presumably ANDed into the
    // state word by the superclass — confirm against AbstractRegionEntry.
    clearBits(~RECENTLY_USED);
  }

  @Override
  public boolean isEvicted() {
    return areAnyBitsSet(EVICTED);
  }

  @Override
  public void setEvicted() {
    setBits(EVICTED);
  }

  @Override
  public void unsetEvicted() {
    clearBits(~EVICTED);
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // Intrusive doubly-linked-list fields for the eviction (LRU) list.
  private EvictionNode nextEvictionNode;
  private EvictionNode previousEvictionNode;
  private int size;

  @Override
  public void setNext(final EvictionNode nextEvictionNode) {
    this.nextEvictionNode = nextEvictionNode;
  }

  @Override
  public EvictionNode next() {
    return this.nextEvictionNode;
  }

  @Override
  public void setPrevious(final EvictionNode previousEvictionNode) {
    this.previousEvictionNode = previousEvictionNode;
  }

  @Override
  public EvictionNode previous() {
    return this.previousEvictionNode;
  }

  @Override
  public int getEntrySize() {
    return this.size;
  }

  protected void setEntrySize(final int size) {
    this.size = size;
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public Object getKeyForSizing() {
    // inline keys always report null for sizing since the size comes from the entry size
    return null;
  }

  // ---------------------------------------- stats code ------------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public void updateStatsForGet(final boolean isHit, final long time) {
    setLastAccessed(time);
    if (isHit) {
      incrementHitCount();
    } else {
      incrementMissCount();
    }
  }

  @Override
  protected void setLastModifiedAndAccessedTimes(final long lastModified, final long lastAccessed) {
    _setLastModified(lastModified);
    if (!DISABLE_ACCESS_TIME_UPDATE_ON_PUT) {
      setLastAccessed(lastAccessed);
    }
  }

  @Override
  public long getLastAccessed() throws InternalStatisticsDisabledException {
    return this.lastAccessed;
  }

  @Override
  public void setLastAccessed(final long lastAccessed) {
    this.lastAccessed = lastAccessed;
  }

  @Override
  public long getHitCount() throws InternalStatisticsDisabledException {
    // Mask widens the signed int counter to long so a wrapped (negative) count reads as unsigned.
    return this.hitCount & 0xFFFFFFFFL;
  }

  @Override
  public long getMissCount() throws InternalStatisticsDisabledException {
    return this.missCount & 0xFFFFFFFFL;
  }

  private void incrementHitCount() {
    HIT_COUNT_UPDATER.incrementAndGet(this);
  }

  private void incrementMissCount() {
    MISS_COUNT_UPDATER.incrementAndGet(this);
  }

  @Override
  public void resetCounts() throws InternalStatisticsDisabledException {
    HIT_COUNT_UPDATER.set(this, 0);
    MISS_COUNT_UPDATER.set(this, 0);
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public void txDidDestroy(long timeStamp) {
    setLastModified(timeStamp);
    setLastAccessed(timeStamp);
    // NOTE(review): counters are reset with plain volatile writes here rather than through the
    // atomic updaters — presumably the caller serializes transaction destroys; confirm.
    this.hitCount = 0;
    this.missCount = 0;
  }

  @Override
  public boolean hasStats() {
    return true;
  }

  // ----------------------------------------- key code -------------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public Object getKey() {
    // Boxes the inline int key on each call.
    return this.key;
  }

  @Override
  public boolean isKeyEqual(final Object key) {
    if (key instanceof Integer) {
      return ((Integer) key).intValue() == this.key;
    }
    return false;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.memorydb.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Represents the output of one of the following operations:
* </p>
* <ul>
* <li>
* <p>
* CreateSubnetGroup
* </p>
* </li>
* <li>
* <p>
* UpdateSubnetGroup
* </p>
* </li>
* </ul>
* <p>
* A subnet group is a collection of subnets (typically private) that you can designate for your clusters running in an
* Amazon Virtual Private Cloud (VPC) environment.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/memorydb-2021-01-01/SubnetGroup" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class SubnetGroup implements Serializable, Cloneable, StructuredPojo {

    /**
     * <p>
     * The name of the subnet group
     * </p>
     */
    private String name;
    /**
     * <p>
     * A description of the subnet group
     * </p>
     */
    private String description;
    /**
     * <p>
     * The Amazon Virtual Private Cloud identifier (VPC ID) of the subnet group.
     * </p>
     */
    private String vpcId;
    /**
     * <p>
     * A list of subnets associated with the subnet group.
     * </p>
     */
    private java.util.List<Subnet> subnets;
    /**
     * <p>
     * The ARN (Amazon Resource Name) of the subnet group.
     * </p>
     */
    private String aRN;

    /**
     * <p>
     * The name of the subnet group
     * </p>
     *
     * @param name
     *        The name of the subnet group
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * <p>
     * The name of the subnet group
     * </p>
     *
     * @return The name of the subnet group
     */
    public String getName() {
        return this.name;
    }

    /**
     * <p>
     * The name of the subnet group
     * </p>
     *
     * @param name
     *        The name of the subnet group
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SubnetGroup withName(String name) {
        setName(name);
        return this;
    }

    /**
     * <p>
     * A description of the subnet group
     * </p>
     *
     * @param description
     *        A description of the subnet group
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * <p>
     * A description of the subnet group
     * </p>
     *
     * @return A description of the subnet group
     */
    public String getDescription() {
        return this.description;
    }

    /**
     * <p>
     * A description of the subnet group
     * </p>
     *
     * @param description
     *        A description of the subnet group
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SubnetGroup withDescription(String description) {
        setDescription(description);
        return this;
    }

    /**
     * <p>
     * The Amazon Virtual Private Cloud identifier (VPC ID) of the subnet group.
     * </p>
     *
     * @param vpcId
     *        The Amazon Virtual Private Cloud identifier (VPC ID) of the subnet group.
     */
    public void setVpcId(String vpcId) {
        this.vpcId = vpcId;
    }

    /**
     * <p>
     * The Amazon Virtual Private Cloud identifier (VPC ID) of the subnet group.
     * </p>
     *
     * @return The Amazon Virtual Private Cloud identifier (VPC ID) of the subnet group.
     */
    public String getVpcId() {
        return this.vpcId;
    }

    /**
     * <p>
     * The Amazon Virtual Private Cloud identifier (VPC ID) of the subnet group.
     * </p>
     *
     * @param vpcId
     *        The Amazon Virtual Private Cloud identifier (VPC ID) of the subnet group.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SubnetGroup withVpcId(String vpcId) {
        setVpcId(vpcId);
        return this;
    }

    /**
     * <p>
     * A list of subnets associated with the subnet group.
     * </p>
     *
     * @return A list of subnets associated with the subnet group.
     */
    public java.util.List<Subnet> getSubnets() {
        // Returns the live internal list (no defensive copy) — callers can mutate it.
        return subnets;
    }

    /**
     * <p>
     * A list of subnets associated with the subnet group.
     * </p>
     *
     * @param subnets
     *        A list of subnets associated with the subnet group.
     */
    public void setSubnets(java.util.Collection<Subnet> subnets) {
        if (subnets == null) {
            this.subnets = null;
            return;
        }

        // Copy so later changes to the caller's collection do not affect this object.
        this.subnets = new java.util.ArrayList<Subnet>(subnets);
    }

    /**
     * <p>
     * A list of subnets associated with the subnet group.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setSubnets(java.util.Collection)} or {@link #withSubnets(java.util.Collection)} if you want to override
     * the existing values.
     * </p>
     *
     * @param subnets
     *        A list of subnets associated with the subnet group.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SubnetGroup withSubnets(Subnet... subnets) {
        if (this.subnets == null) {
            setSubnets(new java.util.ArrayList<Subnet>(subnets.length));
        }
        for (Subnet ele : subnets) {
            this.subnets.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * A list of subnets associated with the subnet group.
     * </p>
     *
     * @param subnets
     *        A list of subnets associated with the subnet group.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SubnetGroup withSubnets(java.util.Collection<Subnet> subnets) {
        setSubnets(subnets);
        return this;
    }

    /**
     * <p>
     * The ARN (Amazon Resource Name) of the subnet group.
     * </p>
     *
     * @param aRN
     *        The ARN (Amazon Resource Name) of the subnet group.
     */
    public void setARN(String aRN) {
        this.aRN = aRN;
    }

    /**
     * <p>
     * The ARN (Amazon Resource Name) of the subnet group.
     * </p>
     *
     * @return The ARN (Amazon Resource Name) of the subnet group.
     */
    public String getARN() {
        return this.aRN;
    }

    /**
     * <p>
     * The ARN (Amazon Resource Name) of the subnet group.
     * </p>
     *
     * @param aRN
     *        The ARN (Amazon Resource Name) of the subnet group.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SubnetGroup withARN(String aRN) {
        setARN(aRN);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getName() != null)
            sb.append("Name: ").append(getName()).append(",");
        if (getDescription() != null)
            sb.append("Description: ").append(getDescription()).append(",");
        if (getVpcId() != null)
            sb.append("VpcId: ").append(getVpcId()).append(",");
        if (getSubnets() != null)
            sb.append("Subnets: ").append(getSubnets()).append(",");
        if (getARN() != null)
            sb.append("ARN: ").append(getARN());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof SubnetGroup == false)
            return false;
        SubnetGroup other = (SubnetGroup) obj;
        // For each field: XOR detects "exactly one side is null"; otherwise compare by equals.
        if (other.getName() == null ^ this.getName() == null)
            return false;
        if (other.getName() != null && other.getName().equals(this.getName()) == false)
            return false;
        if (other.getDescription() == null ^ this.getDescription() == null)
            return false;
        if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false)
            return false;
        if (other.getVpcId() == null ^ this.getVpcId() == null)
            return false;
        if (other.getVpcId() != null && other.getVpcId().equals(this.getVpcId()) == false)
            return false;
        if (other.getSubnets() == null ^ this.getSubnets() == null)
            return false;
        if (other.getSubnets() != null && other.getSubnets().equals(this.getSubnets()) == false)
            return false;
        if (other.getARN() == null ^ this.getARN() == null)
            return false;
        if (other.getARN() != null && other.getARN().equals(this.getARN()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode());
        hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode());
        hashCode = prime * hashCode + ((getVpcId() == null) ? 0 : getVpcId().hashCode());
        hashCode = prime * hashCode + ((getSubnets() == null) ? 0 : getSubnets().hashCode());
        hashCode = prime * hashCode + ((getARN() == null) ? 0 : getARN().hashCode());
        return hashCode;
    }

    @Override
    public SubnetGroup clone() {
        try {
            // Shallow copy: field references (including the subnets list) are shared with the original.
            return (SubnetGroup) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        // Delegates wire-format serialization to the generated marshaller for this model.
        com.amazonaws.services.memorydb.model.transform.SubnetGroupMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.util.par;
import javax.annotation.*;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;
/**
* Immutable configuration for a streamlines context.
*
* <h3>Layout</h3>
*
* <pre><code>
* struct parsl_config {
* float thickness;
* uint32_t {@link #flags};
* int {@link #u_mode};
* float curves_max_flatness;
* float streamlines_seed_spacing;
* {@link ParSLViewport parsl_viewport} streamlines_seed_viewport;
* float miter_limit;
* }</code></pre>
*/
@NativeType("struct parsl_config")
public class ParSLConfig extends Struct implements NativeResource {
/** The struct size in bytes. */
public static final int SIZEOF;
/** The struct alignment in bytes. */
public static final int ALIGNOF;
/** The struct member offsets. */
public static final int
THICKNESS,
FLAGS,
U_MODE,
CURVES_MAX_FLATNESS,
STREAMLINES_SEED_SPACING,
STREAMLINES_SEED_VIEWPORT,
MITER_LIMIT;
static {
Layout layout = __struct(
__member(4),
__member(4),
__member(4),
__member(4),
__member(4),
__member(ParSLViewport.SIZEOF, ParSLViewport.ALIGNOF),
__member(4)
);
SIZEOF = layout.getSize();
ALIGNOF = layout.getAlignment();
THICKNESS = layout.offsetof(0);
FLAGS = layout.offsetof(1);
U_MODE = layout.offsetof(2);
CURVES_MAX_FLATNESS = layout.offsetof(3);
STREAMLINES_SEED_SPACING = layout.offsetof(4);
STREAMLINES_SEED_VIEWPORT = layout.offsetof(5);
MITER_LIMIT = layout.offsetof(6);
}
/**
* Creates a {@code ParSLConfig} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
* visible to the struct instance and vice versa.
*
* <p>The created instance holds a strong reference to the container object.</p>
*/
public ParSLConfig(ByteBuffer container) {
super(memAddress(container), __checkContainer(container, SIZEOF));
}
@Override
public int sizeof() { return SIZEOF; }
/** @return the value of the {@code thickness} field. */
public float thickness() { return nthickness(address()); }
/** one or more of:<br><table><tr><td>{@link ParStreamlines#PARSL_FLAG_WIREFRAME FLAG_WIREFRAME}</td><td>{@link ParStreamlines#PARSL_FLAG_ANNOTATIONS FLAG_ANNOTATIONS}</td><td>{@link ParStreamlines#PARSL_FLAG_SPINE_LENGTHS FLAG_SPINE_LENGTHS}</td><td>{@link ParStreamlines#PARSL_FLAG_RANDOM_OFFSETS FLAG_RANDOM_OFFSETS}</td></tr><tr><td>{@link ParStreamlines#PARSL_FLAG_CURVE_GUIPARSL_FLAG_DES FLAG_CURVE_GUIPARSL_FLAG_DES}</td></tr></table> */
@NativeType("uint32_t")
public int flags() { return nflags(address()); }
/** one of:<br><table><tr><td>{@link ParStreamlines#PAR_U_MODE_NORMALIZED_DISTANCE}</td><td>{@link ParStreamlines#PAR_U_MODE_DISTANCE}</td><td>{@link ParStreamlines#PAR_U_MODE_SEGMENT_INDEX}</td></tr><tr><td>{@link ParStreamlines#PAR_U_MODE_SEGMENT_FRACTION}</td></tr></table> */
public int u_mode() { return nu_mode(address()); }
/** @return the value of the {@code curves_max_flatness} field. */
public float curves_max_flatness() { return ncurves_max_flatness(address()); }
/** @return the value of the {@code streamlines_seed_spacing} field. */
public float streamlines_seed_spacing() { return nstreamlines_seed_spacing(address()); }
/** @return a {@link ParSLViewport} view of the {@code streamlines_seed_viewport} field. */
@NativeType("parsl_viewport")
public ParSLViewport streamlines_seed_viewport() { return nstreamlines_seed_viewport(address()); }
/** @return the value of the {@code miter_limit} field. */
public float miter_limit() { return nmiter_limit(address()); }
/** Sets the specified value to the {@code thickness} field. */
public ParSLConfig thickness(float value) { nthickness(address(), value); return this; }
/** Sets the specified value to the {@link #flags} field. */
public ParSLConfig flags(@NativeType("uint32_t") int value) { nflags(address(), value); return this; }
/** Sets the specified value to the {@link #u_mode} field. */
public ParSLConfig u_mode(int value) { nu_mode(address(), value); return this; }
/** Sets the specified value to the {@code curves_max_flatness} field. */
public ParSLConfig curves_max_flatness(float value) { ncurves_max_flatness(address(), value); return this; }
/** Sets the specified value to the {@code streamlines_seed_spacing} field. */
public ParSLConfig streamlines_seed_spacing(float value) { nstreamlines_seed_spacing(address(), value); return this; }
/** Copies the specified {@link ParSLViewport} to the {@code streamlines_seed_viewport} field. */
public ParSLConfig streamlines_seed_viewport(@NativeType("parsl_viewport") ParSLViewport value) { nstreamlines_seed_viewport(address(), value); return this; }
/** Passes the {@code streamlines_seed_viewport} field to the specified {@link java.util.function.Consumer Consumer}. */
public ParSLConfig streamlines_seed_viewport(java.util.function.Consumer<ParSLViewport> consumer) { consumer.accept(streamlines_seed_viewport()); return this; }
/** Sets the specified value to the {@code miter_limit} field. */
public ParSLConfig miter_limit(float value) { nmiter_limit(address(), value); return this; }
/** Initializes this struct with the specified values. */
public ParSLConfig set(
float thickness,
int flags,
int u_mode,
float curves_max_flatness,
float streamlines_seed_spacing,
ParSLViewport streamlines_seed_viewport,
float miter_limit
) {
thickness(thickness);
flags(flags);
u_mode(u_mode);
curves_max_flatness(curves_max_flatness);
streamlines_seed_spacing(streamlines_seed_spacing);
streamlines_seed_viewport(streamlines_seed_viewport);
miter_limit(miter_limit);
return this;
}
/**
* Copies the specified struct data to this struct.
*
* @param src the source struct
*
* @return this struct
*/
public ParSLConfig set(ParSLConfig src) {
memCopy(src.address(), address(), SIZEOF);
return this;
}
// -----------------------------------
/** Returns a new {@code ParSLConfig} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
public static ParSLConfig malloc() {
return wrap(ParSLConfig.class, nmemAllocChecked(SIZEOF));
}
/** Returns a new {@code ParSLConfig} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
public static ParSLConfig calloc() {
return wrap(ParSLConfig.class, nmemCallocChecked(1, SIZEOF));
}
/** Returns a new {@code ParSLConfig} instance allocated with {@link BufferUtils}. */
public static ParSLConfig create() {
ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
return wrap(ParSLConfig.class, memAddress(container), container);
}
/** Returns a new {@code ParSLConfig} instance for the specified memory address. */
public static ParSLConfig create(long address) {
return wrap(ParSLConfig.class, address);
}
/** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
@Nullable
public static ParSLConfig createSafe(long address) {
return address == NULL ? null : wrap(ParSLConfig.class, address);
}
/**
* Returns a new {@link ParSLConfig.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
*
* @param capacity the buffer capacity
*/
public static ParSLConfig.Buffer malloc(int capacity) {
return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity);
}
/**
* Returns a new {@link ParSLConfig.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
*
* @param capacity the buffer capacity
*/
public static ParSLConfig.Buffer calloc(int capacity) {
return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity);
}
/**
* Returns a new {@link ParSLConfig.Buffer} instance allocated with {@link BufferUtils}.
*
* @param capacity the buffer capacity
*/
public static ParSLConfig.Buffer create(int capacity) {
ByteBuffer container = __create(capacity, SIZEOF);
return wrap(Buffer.class, memAddress(container), capacity, container);
}
/**
* Create a {@link ParSLConfig.Buffer} instance at the specified memory.
*
* @param address the memory address
* @param capacity the buffer capacity
*/
public static ParSLConfig.Buffer create(long address, int capacity) {
return wrap(Buffer.class, address, capacity);
}
/** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
@Nullable
public static ParSLConfig.Buffer createSafe(long address, int capacity) {
return address == NULL ? null : wrap(Buffer.class, address, capacity);
}
// -----------------------------------

// Legacy *Stack aliases retained for source compatibility; each simply
// forwards to the MemoryStack-parameterized malloc/calloc overloads below.

/** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
@Deprecated public static ParSLConfig mallocStack() { return malloc(stackGet()); }
/** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
@Deprecated public static ParSLConfig callocStack() { return calloc(stackGet()); }
/** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
@Deprecated public static ParSLConfig mallocStack(MemoryStack stack) { return malloc(stack); }
/** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
@Deprecated public static ParSLConfig callocStack(MemoryStack stack) { return calloc(stack); }
/** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
@Deprecated public static ParSLConfig.Buffer mallocStack(int capacity) { return malloc(capacity, stackGet()); }
/** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
@Deprecated public static ParSLConfig.Buffer callocStack(int capacity) { return calloc(capacity, stackGet()); }
/** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
@Deprecated public static ParSLConfig.Buffer mallocStack(int capacity, MemoryStack stack) { return malloc(capacity, stack); }
/** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
@Deprecated public static ParSLConfig.Buffer callocStack(int capacity, MemoryStack stack) { return calloc(capacity, stack); }

/**
 * Returns a new {@code ParSLConfig} instance allocated on the specified {@link MemoryStack}.
 *
 * @param stack the stack from which to allocate
 */
public static ParSLConfig malloc(MemoryStack stack) {
    // Stack-allocated; freed automatically when the stack frame is popped.
    return wrap(ParSLConfig.class, stack.nmalloc(ALIGNOF, SIZEOF));
}

/**
 * Returns a new {@code ParSLConfig} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
 *
 * @param stack the stack from which to allocate
 */
public static ParSLConfig calloc(MemoryStack stack) {
    return wrap(ParSLConfig.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
}

/**
 * Returns a new {@link ParSLConfig.Buffer} instance allocated on the specified {@link MemoryStack}.
 *
 * @param stack the stack from which to allocate
 * @param capacity the buffer capacity
 */
public static ParSLConfig.Buffer malloc(int capacity, MemoryStack stack) {
    return wrap(Buffer.class, stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
}

/**
 * Returns a new {@link ParSLConfig.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
 *
 * @param stack the stack from which to allocate
 * @param capacity the buffer capacity
 */
public static ParSLConfig.Buffer calloc(int capacity, MemoryStack stack) {
    return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
}
// -----------------------------------

// Raw field accessors: each n-prefixed method reads or writes the field at
// struct + <FIELD_OFFSET> directly via Unsafe, with no bounds or null checks.
// Callers must pass a valid struct address.

/** Unsafe version of {@link #thickness}. */
public static float nthickness(long struct) { return UNSAFE.getFloat(null, struct + ParSLConfig.THICKNESS); }
/** Unsafe version of {@link #flags}. */
public static int nflags(long struct) { return UNSAFE.getInt(null, struct + ParSLConfig.FLAGS); }
/** Unsafe version of {@link #u_mode}. */
public static int nu_mode(long struct) { return UNSAFE.getInt(null, struct + ParSLConfig.U_MODE); }
/** Unsafe version of {@link #curves_max_flatness}. */
public static float ncurves_max_flatness(long struct) { return UNSAFE.getFloat(null, struct + ParSLConfig.CURVES_MAX_FLATNESS); }
/** Unsafe version of {@link #streamlines_seed_spacing}. */
public static float nstreamlines_seed_spacing(long struct) { return UNSAFE.getFloat(null, struct + ParSLConfig.STREAMLINES_SEED_SPACING); }
/** Unsafe version of {@link #streamlines_seed_viewport}. */
public static ParSLViewport nstreamlines_seed_viewport(long struct) { return ParSLViewport.create(struct + ParSLConfig.STREAMLINES_SEED_VIEWPORT); }
/** Unsafe version of {@link #miter_limit}. */
public static float nmiter_limit(long struct) { return UNSAFE.getFloat(null, struct + ParSLConfig.MITER_LIMIT); }

/** Unsafe version of {@link #thickness(float) thickness}. */
public static void nthickness(long struct, float value) { UNSAFE.putFloat(null, struct + ParSLConfig.THICKNESS, value); }
/** Unsafe version of {@link #flags(int) flags}. */
public static void nflags(long struct, int value) { UNSAFE.putInt(null, struct + ParSLConfig.FLAGS, value); }
/** Unsafe version of {@link #u_mode(int) u_mode}. */
public static void nu_mode(long struct, int value) { UNSAFE.putInt(null, struct + ParSLConfig.U_MODE, value); }
/** Unsafe version of {@link #curves_max_flatness(float) curves_max_flatness}. */
public static void ncurves_max_flatness(long struct, float value) { UNSAFE.putFloat(null, struct + ParSLConfig.CURVES_MAX_FLATNESS, value); }
/** Unsafe version of {@link #streamlines_seed_spacing(float) streamlines_seed_spacing}. */
public static void nstreamlines_seed_spacing(long struct, float value) { UNSAFE.putFloat(null, struct + ParSLConfig.STREAMLINES_SEED_SPACING, value); }
/** Unsafe version of {@link #streamlines_seed_viewport(ParSLViewport) streamlines_seed_viewport}. */
public static void nstreamlines_seed_viewport(long struct, ParSLViewport value) { memCopy(value.address(), struct + ParSLConfig.STREAMLINES_SEED_VIEWPORT, ParSLViewport.SIZEOF); }
/** Unsafe version of {@link #miter_limit(float) miter_limit}. */
public static void nmiter_limit(long struct, float value) { UNSAFE.putFloat(null, struct + ParSLConfig.MITER_LIMIT, value); }
// -----------------------------------

/** An array of {@link ParSLConfig} structs. */
public static class Buffer extends StructBuffer<ParSLConfig, Buffer> implements NativeResource {

    // Shared flyweight used by StructBuffer iteration; its -1L address is a
    // placeholder that is presumably re-pointed per element -- never dereferenced as-is.
    private static final ParSLConfig ELEMENT_FACTORY = ParSLConfig.create(-1L);

    /**
     * Creates a new {@code ParSLConfig.Buffer} instance backed by the specified container.
     *
     * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
     * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
     * by {@link ParSLConfig#SIZEOF}, and its mark will be undefined.
     *
     * <p>The created buffer instance holds a strong reference to the container object.</p>
     */
    public Buffer(ByteBuffer container) {
        super(container, container.remaining() / SIZEOF);
    }

    // Wraps externally-managed memory; no backing container is retained.
    public Buffer(long address, int cap) {
        super(address, null, -1, 0, cap, cap);
    }

    // Full slice/duplicate constructor used internally by StructBuffer.
    Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
        super(address, container, mark, pos, lim, cap);
    }

    @Override
    protected Buffer self() {
        return this;
    }

    @Override
    protected ParSLConfig getElementFactory() {
        return ELEMENT_FACTORY;
    }

    // Field getters/setters below delegate to the static n-accessors at the
    // current element's address().

    /** @return the value of the {@code thickness} field. */
    public float thickness() { return ParSLConfig.nthickness(address()); }
    /** @return the value of the {@link ParSLConfig#flags} field. */
    @NativeType("uint32_t")
    public int flags() { return ParSLConfig.nflags(address()); }
    /** @return the value of the {@link ParSLConfig#u_mode} field. */
    public int u_mode() { return ParSLConfig.nu_mode(address()); }
    /** @return the value of the {@code curves_max_flatness} field. */
    public float curves_max_flatness() { return ParSLConfig.ncurves_max_flatness(address()); }
    /** @return the value of the {@code streamlines_seed_spacing} field. */
    public float streamlines_seed_spacing() { return ParSLConfig.nstreamlines_seed_spacing(address()); }
    /** @return a {@link ParSLViewport} view of the {@code streamlines_seed_viewport} field. */
    @NativeType("parsl_viewport")
    public ParSLViewport streamlines_seed_viewport() { return ParSLConfig.nstreamlines_seed_viewport(address()); }
    /** @return the value of the {@code miter_limit} field. */
    public float miter_limit() { return ParSLConfig.nmiter_limit(address()); }
    /** Sets the specified value to the {@code thickness} field. */
    public ParSLConfig.Buffer thickness(float value) { ParSLConfig.nthickness(address(), value); return this; }
    /** Sets the specified value to the {@link ParSLConfig#flags} field. */
    public ParSLConfig.Buffer flags(@NativeType("uint32_t") int value) { ParSLConfig.nflags(address(), value); return this; }
    /** Sets the specified value to the {@link ParSLConfig#u_mode} field. */
    public ParSLConfig.Buffer u_mode(int value) { ParSLConfig.nu_mode(address(), value); return this; }
    /** Sets the specified value to the {@code curves_max_flatness} field. */
    public ParSLConfig.Buffer curves_max_flatness(float value) { ParSLConfig.ncurves_max_flatness(address(), value); return this; }
    /** Sets the specified value to the {@code streamlines_seed_spacing} field. */
    public ParSLConfig.Buffer streamlines_seed_spacing(float value) { ParSLConfig.nstreamlines_seed_spacing(address(), value); return this; }
    /** Copies the specified {@link ParSLViewport} to the {@code streamlines_seed_viewport} field. */
    public ParSLConfig.Buffer streamlines_seed_viewport(@NativeType("parsl_viewport") ParSLViewport value) { ParSLConfig.nstreamlines_seed_viewport(address(), value); return this; }
    /** Passes the {@code streamlines_seed_viewport} field to the specified {@link java.util.function.Consumer Consumer}. */
    public ParSLConfig.Buffer streamlines_seed_viewport(java.util.function.Consumer<ParSLViewport> consumer) { consumer.accept(streamlines_seed_viewport()); return this; }
    /** Sets the specified value to the {@code miter_limit} field. */
    public ParSLConfig.Buffer miter_limit(float value) { ParSLConfig.nmiter_limit(address(), value); return this; }
}
}
| |
package com.planet_ink.coffee_mud.Libraries;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.interfaces.BoundedObject.BoundedCube;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.CMSecurity.DbgFlag;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.Electronics.Computer;
import com.planet_ink.coffee_mud.Items.interfaces.Electronics.ElecPanel;
import com.planet_ink.coffee_mud.Items.interfaces.Electronics.PowerGenerator;
import com.planet_ink.coffee_mud.Items.interfaces.Electronics.PowerSource;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.threads.*;
import com.planet_ink.coffee_mud.core.collections.*;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.util.*;
import java.util.concurrent.atomic.*;
/*
Copyright 2000-2014 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public class GroundWired extends StdLibrary implements TechLibrary
{
@Override public String ID(){return "GroundWired";}

// Fallback manufacturer; must always be DefaultManufacturer, w/o changes.
protected Manufacturer defaultManufacturer=null;
// All known manufacturers, keyed by upper-cased, trimmed name.
protected final Map<String,Manufacturer> manufacturers = new SHashtable<String,Manufacturer>();
// Electric "circuits": lower-cased circuit key -> weakly-held member electronics.
protected final Map<String,LinkedList<WeakReference<Electronics>>> sets=new Hashtable<String,LinkedList<WeakReference<Electronics>>>();
// NOTE(review): not referenced anywhere in this file's visible code -- confirm external use.
protected final Map<PowerGenerator,Pair<List<PowerSource>,List<Electronics>>> currents = new STreeMap<PowerGenerator,Pair<List<PowerSource>,List<Electronics>>>();
protected final static List<PowerGenerator> emptyGeneratorList=new ArrayList<PowerGenerator>();
protected final AtomicInteger nextKey = new AtomicInteger(0);
// Global tech state, loaded from and persisted to the TECH property resources.
public int globalTechLevel = 0;
public long globalTechReachedOn=0;
// Lazily-built, reusable MSG_POWERCURRENT message; see getPowerMsg().
protected CMMsg powerMsg = null;
@Override
public void initializeClass()
{
    super.initializeClass();
    loadAllManufacturers();
    // Restore persisted global tech state from the TECH property resources.
    globalTechLevel=CMath.s_int(Resources.getPropResource("TECH", "GLOBALLEVEL"));
    globalTechReachedOn=CMath.s_long(Resources.getPropResource("TECH", "GLOBALREACHEDON"));
}

@Override
public int getGlobalTechLevel()
{
    return globalTechLevel;
}

@Override
public int getRandomGlobalTechLevel()
{
    // Low-weighted roll near the current global level; the exact
    // distribution is defined by the Dice library's rollLow(number, sides, modifier).
    return CMLib.dice().rollLow(1, 10, globalTechLevel-1);
}

/** Raises the global tech level by one and persists the new value. */
protected void bumpTechLevel()
{
    globalTechLevel++;
    Resources.setPropResource("TECH", "GLOBALLEVEL",""+globalTechLevel);
    // NOTE(review): writes "0" rather than the current time -- confirm intended.
    Resources.setPropResource("TECH", "GLOBALREACHEDON","0");
}
/**
 * Finalizes an electronics item whose manufacturer is the "RANDOM"
 * placeholder: resolves a concrete manufacturer, randomizes the tech level,
 * and rewrites the item's name and display text to match.  No-op when
 * tech-leveling is disabled or the manufacturer is already fixed.
 *
 * @param I the electronics item to finalize
 */
@Override
public void fixItemTechLevel(Electronics I)
{
    if((!CMSecurity.isDisabled(CMSecurity.DisFlag.TECHLEVEL)) && (I.getManufacturerName().equalsIgnoreCase("RANDOM")))
    {
        I.getFinalManufacturer(); // resolves the RANDOM placeholder to a concrete manufacturer
        I.setTechLevel(getRandomGlobalTechLevel());
        final String oldName=I.Name();
        String newName=CMLib.english().startWithAorAn(I.getFinalManufacturer().name()+" "+CMLib.english().cleanArticles(oldName));
        I.setName(newName);
        final String[] marks=CMProps.getListFileStringList(CMProps.ListFile.TECH_LEVEL_NAMES);
        if(marks.length>0)
            newName+=" "+marks[I.techLevel()%marks.length];
        // BUGFIX: was indexOf(oldName)>0, which missed display texts that START
        // with the old name (index 0) and silently discarded their trailing text.
        if(I.displayText().indexOf(oldName)>=0)
            I.setDisplayText(CMStrings.replaceAll(I.displayText(), oldName, newName));
        else
            I.setDisplayText(_("@x1 is here.",newName));
    }
}
/**
 * Registers an electronics item on the electrical "circuit" keyed by its
 * current location: the ship name (+REGISTRY blurb) for space ships, the
 * land-title lot id for titled property, or the extended room id otherwise.
 * Keys are stored lower-cased.  If the item moved off {@code oldKey}'s
 * circuit, it is unregistered there first.
 *
 * @param E      the electronics item being registered, or null
 * @param oldKey the circuit key it was previously registered under, or null
 * @return the (lower-cased) circuit key now in use, or null when E is not in a room
 */
@Override
public synchronized String registerElectrics(final Electronics E, final String oldKey)
{
    final ItemPossessor possessor=(E==null)?null:E.owner();
    if((E != null) && (possessor instanceof Room))
    {
        final Room R=(Room)possessor;
        String newKey;
        if(R.getArea() instanceof SpaceShip)
        {
            // One circuit per ship; the REGISTRY blurb disambiguates same-named ships.
            newKey=R.getArea().Name();
            final String registryNum=R.getArea().getBlurbFlag("REGISTRY");
            if(registryNum!=null)
                newKey+=registryNum;
        }
        else
        {
            final LandTitle title = CMLib.law().getLandTitle(R);
            if(title != null)
                newKey=title.getUniqueLotID();
            else
                newKey=CMLib.map().getExtendedRoomID(R);
        }
        newKey=newKey.toLowerCase();
        if(oldKey!=null)
        {
            if(newKey.equalsIgnoreCase(oldKey))
                return oldKey.toLowerCase(); // already on the right circuit
            unregisterElectronics(E,oldKey);
        }
        LinkedList<WeakReference<Electronics>> set=sets.get(newKey);
        if(set==null)
        {
            set=new LinkedList<WeakReference<Electronics>>();
            sets.put(newKey, set);
        }
        // Weak references let destroyed items drop off the circuit without explicit cleanup.
        set.add(new WeakReference<Electronics>(E));
        return newKey;
    }
    return null;
}
/**
 * Returns the live electronics registered on the given circuit.
 *
 * @param key the circuit key (any case)
 * @return a fresh list of live members; never null, possibly empty
 */
@Override
public synchronized List<Electronics> getMakeRegisteredElectronics(String key)
{
    final LinkedList<WeakReference<Electronics>> set=sets.get(key.toLowerCase());
    final LinkedList<Electronics> list=new LinkedList<Electronics>();
    if(set==null)
        return list;
    for(final WeakReference<Electronics> e : set)
    {
        // BUGFIX: dereference each weak reference exactly once -- the old
        // if(e.get()!=null) list.add(e.get()) could add null if the
        // referent was collected between the two get() calls.
        final Electronics E=e.get();
        if(E!=null)
            list.add(E);
    }
    return list;
}
/**
 * Returns a snapshot of all registered circuit keys, copied into a fresh
 * Vector so callers can iterate without holding this library's monitor.
 */
@Override
public synchronized List<String> getMakeRegisteredKeys()
{
    return new Vector<String>(sets.keySet());
}
/**
 * Removes a single electronics item from the given circuit, dropping the
 * circuit entirely when it becomes empty.
 *
 * @param E      the item to remove; null is a no-op
 * @param oldKey the circuit key (any case); null is a no-op
 */
@Override
public synchronized void unregisterElectronics(final Electronics E, final String oldKey)
{
    if((oldKey!=null)&&(E!=null))
    {
        // Keys are stored lower-cased (see registerElectrics); normalize once.
        final String lowerKey=oldKey.toLowerCase();
        final LinkedList<WeakReference<Electronics>> oldSet=sets.get(lowerKey);
        if(oldSet!=null)
        {
            for(final Iterator<WeakReference<Electronics>> e=oldSet.iterator();e.hasNext();)
            {
                final WeakReference<Electronics> w=e.next();
                if(w.get()==E) // identity match: one item appears at most once
                {
                    e.remove();
                    break;
                }
            }
            if(oldSet.size()==0)
                sets.remove(lowerKey); // BUGFIX: was sets.remove(oldKey) -- a
                                       // mixed-case oldKey never matched the
                                       // lower-cased map key, leaking empty sets
        }
    }
}
/**
 * Drops an entire circuit and all electronics registered under it.
 *
 * @param oldKey the circuit key (any case); null is a no-op
 */
@Override
public synchronized void unregisterAllElectronics(final String oldKey)
{
    // BUGFIX: the old code looked the set up by oldKey.toLowerCase() but then
    // called sets.remove(oldKey) with the original case, so mixed-case keys
    // were never actually removed.  Keys are stored lower-cased; remove by
    // the normalized key directly.
    if(oldKey!=null)
        sets.remove(oldKey.toLowerCase());
}
@Override
public TickClient getServiceClient()
{
    // Tick-down client created by activate(); null when the support thread is not running.
    return serviceClient;
}

// Shared, stateless "always empty" iterators, returned when a circuit key has no members.
protected final static Iterator<Electronics.Computer> emptyComputerIterator= new Iterator<Electronics.Computer>()
{
    @Override public boolean hasNext() { return false; }
    @Override public Computer next() { return null; }
    @Override public void remove() { }
};

protected final static Iterator<Room> emptyComputerRoomIterator= new Iterator<Room>()
{
    @Override public boolean hasNext() { return false; }
    @Override public Room next() { return null; }
    @Override public void remove() { }
};

// Passes only weak references that still point at a live Computer.
protected final static Filterer<WeakReference<Electronics>> computerFilterer=new Filterer<WeakReference<Electronics>>()
{
    @Override public boolean passesFilter(WeakReference<Electronics> obj)
    {
        return obj.get() instanceof Electronics.Computer;
    }
};

// Unwraps a weak reference into its Computer (assumes the filter above already passed).
protected final static Converter<WeakReference<Electronics>,Electronics.Computer> computerConverter=new Converter<WeakReference<Electronics>,Electronics.Computer>()
{
    @Override public Electronics.Computer convert(WeakReference<Electronics> obj) { return (Electronics.Computer)obj.get(); }
};

// Maps a computer to the room it currently occupies (may be null).
protected final static Converter<Electronics.Computer,Room> computerRoomConverter=new Converter<Electronics.Computer,Room>()
{
    @Override
    public Room convert(Electronics.Computer obj)
    {
        return CMLib.map().roomLocation(obj);
    }
};
/**
 * Returns an iterator over all live Computers registered on the given circuit.
 *
 * @param key the circuit key (any case)
 * @return a lazy, filtered view; never null
 */
@Override
public synchronized Iterator<Electronics.Computer> getComputers(String key)
{
    final LinkedList<WeakReference<Electronics>> oldSet=sets.get(key.toLowerCase());
    if(oldSet==null)
        return emptyComputerIterator;
    return new ConvertingIterator<WeakReference<Electronics>,Electronics.Computer>(new FilteredIterator<WeakReference<Electronics>>(oldSet.iterator(), computerFilterer),computerConverter);
}

/**
 * Returns an iterator over the distinct rooms containing computers on the
 * given circuit; duplicates are dropped with a per-iterator "done" set.
 *
 * @param key the circuit key (any case)
 * @return a lazy, de-duplicated view; never null
 */
@Override
public synchronized Iterator<Room> getComputerRooms(String key)
{
    return new FilteredIterator<Room>(new ConvertingIterator<Electronics.Computer,Room>(getComputers(key),computerRoomConverter), new Filterer<Room>()
    {
        private final Set<Room> done=new HashSet<Room>();
        @Override public boolean passesFilter(Room obj)
        {
            if(done.contains(obj))
                return false;
            done.add(obj);
            return true;
        }
    });
}
/**
 * Lazily builds (once) and returns the shared MSG_POWERCURRENT message,
 * sourced from a non-savable "ElectricalElemental" MOB, with the given
 * power amount set as its value.  The single message instance is reused
 * across all circuits; callers run on the one support-tick thread.
 *
 * @param powerAmt the amount of power the message should carry
 * @return the shared, reusable power message
 */
protected CMMsg getPowerMsg(int powerAmt)
{
    if(powerMsg==null)
    {
        final MOB powerMOB=CMClass.getMOB("StdMOB");
        powerMOB.baseCharStats().setMyRace(CMClass.getRace("ElectricalElemental"));
        powerMOB.setSavable(false); // never persisted; exists only as a message source
        powerMOB.setLocation(CMLib.map().getRandomRoom());
        powerMOB.recoverCharStats();
        powerMsg=CMClass.getMsg(powerMOB, CMMsg.MSG_POWERCURRENT, null);
    }
    powerMsg.setValue(powerAmt);
    return powerMsg;
}

/** Starts (once) the support tick thread that drives currents and space movement. */
@Override
public boolean activate()
{
    if(serviceClient==null)
    {
        // Thread-group initial distinguishes multiple host instances in one VM.
        name="THWired"+Thread.currentThread().getThreadGroup().getName().charAt(0);
        serviceClient=CMLib.threads().startTickDown(this, Tickable.TICKID_SUPPORT|Tickable.TICKID_SOLITARYMASK, CMProps.getTickMillis(), 1);
    }
    return true;
}
/**
 * Advances the space simulation one tick: moves every moving non-area space
 * object, nudges objects one G toward nearby massive bodies, and leaves
 * TODO hooks for collision/landing handling.  Ships whose area is not
 * ACTIVE are skipped entirely.
 */
public void runSpace()
{
    for(final Enumeration<SpaceObject> o = CMLib.map().getSpaceObjects(); o.hasMoreElements(); )
    {
        final SpaceObject O=o.nextElement();
        if(!(O instanceof Area))
        {
            if((O instanceof SpaceShip)
            &&(((SpaceShip)O).getShipArea()!=null)
            &&(((SpaceShip)O).getShipArea().getAreaState()!=Area.State.ACTIVE))
                continue;
            BoundedCube cube=O.getBounds();
            if(O.speed()>0)
            {
                CMLib.map().moveSpaceObject(O);
                // Sweep the bounding cube along this tick's travel so the
                // intersection tests below cover the whole path, not just the endpoint.
                cube=cube.expand(O.direction(),O.speed());
            }
            final List<SpaceObject> cOs=CMLib.map().getSpaceObjectsWithin(O, 0, SpaceObject.Distance.LightMinute.dm);
            for(final SpaceObject cO : cOs)
                if(cO != O)
                {
                    // Gravity approximation: accelerate O one G toward any area or
                    // sufficiently massive body whose gravity radius O is inside,
                    // unless the two already intersect.
                    if(((cO instanceof Area)||(cO.getMass() > (SpaceObject.MULTIPLIER_PLANET_MASS/4)))
                    &&((CMLib.map().getDistanceFrom(O, cO)-cO.radius())<=(cO.radius()*SpaceObject.MULTIPLIER_GRAVITY_RADIUS))
                    &&(!cO.getBounds().intersects(cube)))
                    {
                        final double[] directionTo=CMLib.map().getDirection(O, cO);
                        CMLib.map().moveSpaceObject(O, directionTo, SpaceObject.ACCELLERATION_G);
                        //TODO: if direction is now mostly in the direction of gravity, consider landing.
                        //TODO: gravity
                    }
                    if(cO.getBounds().intersects(cube))
                    {
                        //TODO: we have a collision! or landing
                        // if destroyed, break
                    }
                    //TODO: we might also have a landing, or something near one...
                    // maybe good to use the entryset<o,list> so you always have
                    // the nearby things.
                    // this is important because this needs to do gravity also.
                    // do gravity first.
                    // when moving ships, collisions are better if you are looking
                    // in a radius that includes the speed.
                }
        }
    }
}
/**
 * Support-thread entry point: pushes electric currents across all circuits,
 * then (always, even if current-processing threw) advances the space
 * simulation.  Returns true so the tick keeps rescheduling.
 */
@Override
public boolean tick(Tickable ticking, int tickID)
{
    try
    {
        if(!CMSecurity.isDisabled(CMSecurity.DisFlag.ELECTRICTHREAD))
        {
            isDebugging=CMSecurity.isDebugging(DbgFlag.UTILITHREAD);
            tickStatus=Tickable.STATUS_ALIVE;
            try
            {
                runElectricCurrents();
            }
            finally
            {
                // Space movement must run each tick regardless of electric errors.
                runSpace();
            }
        }
    }
    finally
    {
        tickStatus=Tickable.STATUS_NOT;
        setThreadStatus(serviceClient,"sleeping");
    }
    return true;
}

/** Clears all registries and stops the support tick thread, if running. */
@Override
public boolean shutdown()
{
    sets.clear();
    manufacturers.clear();
    if(CMLib.threads().isTicking(this, TICKID_SUPPORT|Tickable.TICKID_SOLITARYMASK))
    {
        CMLib.threads().deleteTick(this, TICKID_SUPPORT|Tickable.TICKID_SOLITARYMASK);
        serviceClient=null;
    }
    return true;
}
/**
 * Runs one tick of power distribution over a single circuit.
 * Phases:
 *  1) send each generator a zero-valued power message (its generation tick);
 *  2) pool the power held by activated generators (which are drained) and
 *     activated batteries;
 *  3) if the pool is empty, notify panels and batteries with zero power;
 *  4) otherwise split the pool across panels in proportion to powerNeeds(),
 *     offer the remainder to the batteries, and finally put any leftover
 *     back into the generators, capped at each generator's capacity.
 * After each send, msg.value() appears to hold the unconsumed remainder
 * (negative meaning "took everything offered") -- see the two
 * remainingPowerToDistribute updates; confirm against CMMsg receivers.
 *
 * @param generators the circuit's generators
 * @param batteries  the circuit's batteries / power sources
 * @param panels     the circuit's consuming panels
 * @throws Exception if message processing fails
 */
protected void processElectricCurrents(final List<PowerGenerator> generators, final List<PowerSource> batteries, final List<ElecPanel> panels) throws Exception
{
    final CMMsg powerMsg=getPowerMsg(0);
    // Phase 1: generation tick for every generator.
    for(final PowerGenerator E : generators)
    {
        powerMsg.setTarget(E);
        powerMsg.setValue(0);
        final Room R=CMLib.map().roomLocation(E);
        if((R!=null)&&(R.okMessage(powerMsg.source(), powerMsg)))
            R.send(powerMsg.source(), powerMsg);
    }
    // Phase 2: pool available power; generators are drained into the pool.
    long remainingPowerToDistribute=0;
    long availablePowerToDistribute=0;
    for(final PowerGenerator G : generators)
        if(G.activated())
        {
            availablePowerToDistribute+=G.powerRemaining();
            G.setPowerRemaining(0);
        }
    for(final PowerSource B : batteries)
        if(B.activated())
            availablePowerToDistribute+=B.powerRemaining();
    if(availablePowerToDistribute==0)
    {
        // Phase 3: nothing to give -- still notify panels and batteries (value 0).
        for(final ElecPanel E : panels)
        {
            powerMsg.setTarget(E);
            powerMsg.setValue(0);
            final Room R=CMLib.map().roomLocation(E);
            if((R!=null)&&(R.okMessage(powerMsg.source(), powerMsg)))
                R.send(powerMsg.source(), powerMsg);
        }
        for(final PowerSource E : batteries)
        {
            powerMsg.setTarget(E);
            powerMsg.setValue(0);
            final Room R=CMLib.map().roomLocation(E);
            if((R!=null)&&(R.okMessage(powerMsg.source(), powerMsg)))
                R.send(powerMsg.source(), powerMsg);
        }
    }
    else
    {
        // Phase 4a: proportional distribution to panels.
        remainingPowerToDistribute=availablePowerToDistribute;
        double totalPowerNeeded=0.0;
        for(final ElecPanel E : panels)
            totalPowerNeeded+=((E.powerNeeds()<=0)?1.0:E.powerNeeds()); // zero-need panels still get a minimal share
        if(totalPowerNeeded>0.0)
        {
            for(final ElecPanel E : panels)
            {
                powerMsg.setTarget(E);
                int powerToTake=0;
                if(remainingPowerToDistribute>0)
                {
                    final double pctToTake=CMath.div(((E.powerNeeds()<=0)?1:E.powerNeeds()),totalPowerNeeded);
                    powerToTake=(int)Math.round(pctToTake * remainingPowerToDistribute);
                    if(powerToTake<1)
                        powerToTake=1;
                }
                powerMsg.setValue(powerToTake);
                final Room R=CMLib.map().roomLocation(E);
                if((R!=null)&&(R.okMessage(powerMsg.source(), powerMsg)))
                    R.send(powerMsg.source(), powerMsg);
                // Deduct what the panel actually consumed.
                remainingPowerToDistribute-=(powerMsg.value()<0)?powerToTake:(powerToTake-powerMsg.value());
            }
        }
        // Phase 4b: offer the remainder to batteries, split evenly among those left.
        int batteriesLeft=batteries.size();
        for(final PowerSource E : batteries)
        {
            powerMsg.setTarget(E);
            final int amountToDistribute=(int)(remainingPowerToDistribute/batteriesLeft);
            powerMsg.setValue(amountToDistribute<0?0:amountToDistribute);
            final Room R=CMLib.map().roomLocation(E);
            if((R!=null)&&(R.okMessage(powerMsg.source(), powerMsg)))
                R.send(powerMsg.source(), powerMsg);
            batteriesLeft--;
            remainingPowerToDistribute-=(powerMsg.value()<0)?amountToDistribute:(amountToDistribute-powerMsg.value());
        }
        // Phase 4c: return consumed-side leftover to the generators, capped at capacity.
        if(generators.size()>0)
        {
            final int amountLeftOver=(int)((availablePowerToDistribute-remainingPowerToDistribute)/generators.size());
            for(final PowerGenerator G : generators)
                if(G.activated())
                    G.setPowerRemaining(amountLeftOver>G.powerCapacity()?G.powerCapacity():amountLeftOver);
        }
    }
}
/**
 * Classifies the live members of a circuit into generators, batteries and
 * panels (a non-panel component whose owner is an unregistered panel
 * contributes its owner).  Dead weak references are pruned, and an empty
 * circuit is dropped from the registry.
 *
 * @param key        the circuit key (any case)
 * @param generators out: receives the circuit's generators
 * @param batteries  out: receives the circuit's batteries
 * @param panels     out: receives the circuit's panels
 * @return the area containing the first live member found, or null
 */
protected Area fillCurrentLists(final String key, final List<PowerGenerator> generators, final List<PowerSource> batteries, final List<ElecPanel> panels)
{
    Area areaLocation=null;
    synchronized(this)
    {
        final String lowerKey=key.toLowerCase();
        final LinkedList<WeakReference<Electronics>> rawSet=sets.get(lowerKey);
        if(rawSet!=null)
        {
            // First pass: dereference each weak reference exactly once, pruning dead ones.
            final List<Electronics> live=new ArrayList<Electronics>(rawSet.size());
            for(final Iterator<WeakReference<Electronics>> w=rawSet.iterator(); w.hasNext(); )
            {
                final Electronics E=w.next().get();
                if(E==null)
                    w.remove();
                else
                    live.add(E);
            }
            for(final Electronics E : live)
            {
                if(E instanceof PowerGenerator)
                    generators.add((PowerGenerator)E);
                else
                if(E instanceof PowerSource)
                    batteries.add((PowerSource)E);
                else
                if(E instanceof ElecPanel)
                    panels.add((ElecPanel)E);
                else
                if((E.owner() instanceof ElecPanel)
                // BUGFIX: was rawSet.contains(E.owner()) -- comparing an
                // Electronics against a list of WeakReferences, always false.
                // Check membership against the dereferenced elements instead,
                // and don't add the same owner panel twice.
                &&(!live.contains(E.owner()))
                &&(!panels.contains(E.owner())))
                    panels.add((ElecPanel)E.owner());
                if(areaLocation == null)
                    areaLocation=CMLib.map().areaLocation(E);
            }
            if(rawSet.size()==0)
                sets.remove(lowerKey); // BUGFIX: remove by the lower-cased key actually used in the map
        }
    }
    return areaLocation;
}
/**
 * Reports whether the area containing a member of the given circuit is
 * active.  Dead weak references are pruned along the way.  Defaults to true
 * when no member with a known area is found, or on error.
 *
 * @param key the circuit key (any case)
 * @return false only when a member's area is known and not ACTIVE
 */
@Override
public boolean isCurrentActive(final String key)
{
    try
    {
        synchronized(this)
        {
            // Keys are stored lower-cased; normalize once for lookup AND removal.
            final String lowerKey=key.toLowerCase();
            final LinkedList<WeakReference<Electronics>> rawSet=sets.get(lowerKey);
            if(rawSet!=null)
            {
                for(final Iterator<WeakReference<Electronics>> w=rawSet.iterator(); w.hasNext(); )
                {
                    final WeakReference<Electronics> W=w.next();
                    final Electronics E=W.get();
                    if(E==null)
                        w.remove();
                    else
                    {
                        final Area A=CMLib.map().areaLocation(E);
                        if(A!=null)
                            return A.getAreaState()==Area.State.ACTIVE;
                    }
                }
                if(rawSet.size()==0)
                    sets.remove(lowerKey); // BUGFIX: was sets.remove(key) -- mixed-case keys were never removed
            }
        }
    }
    catch(final Exception e)
    {
        Log.errOut("GroundWired",e);
    }
    return true;
}
/**
 * Distributes power for one circuit: gathers its generators, batteries and
 * panels, skips circuits whose containing area is not ACTIVE, then runs the
 * currents.  Errors are logged, never propagated to the tick thread.
 *
 * @param key the circuit key
 */
protected void runElectricCurrent(final String key)
{
    try
    {
        final List<PowerGenerator> generators = new LinkedList<PowerGenerator>();
        final List<PowerSource> batteries = new LinkedList<PowerSource>();
        final List<ElecPanel> panels = new LinkedList<ElecPanel>();
        final Area A=fillCurrentLists(key,generators,batteries,panels);
        if((A!=null)&&(A.getAreaState()!=Area.State.ACTIVE))
            return;
        processElectricCurrents(generators, batteries, panels);
    }
    catch(final Exception e)
    {
        Log.errOut("GroundWired",e);
    }
}

/**
 * Tries to power a single panel from an inactive-but-charged battery on its
 * circuit.  Each candidate battery is sent an ACTIVATE control message; the
 * first one that activates feeds the panel alone this tick.  A battery that
 * refuses to activate is rotated to the end of the circuit's member list so
 * other candidates are tried first next time.
 *
 * @param E   the panel needing power
 * @param key the circuit key (any case)
 * @return true when a battery was activated and power was distributed
 */
@Override
public boolean seekBatteryPower(final ElecPanel E, final String key)
{
    final List<PowerGenerator> generators = new LinkedList<PowerGenerator>();
    final List<PowerSource> batteries = new LinkedList<PowerSource>();
    final List<ElecPanel> panels = new LinkedList<ElecPanel>();
    fillCurrentLists(key,generators,batteries,panels);
    PowerSource battery = null;
    final Room locR=CMLib.map().roomLocation(E);
    for(final PowerSource S : batteries)
    {
        if((!S.activated())&&(S.powerRemaining()>0))
        {
            // Ask the battery to switch itself on via a normal control message,
            // so behaviors/properties can veto or react.
            final MOB M=CMLib.map().getFactoryMOB(locR);
            final CMMsg activateMsg = CMClass.getMsg(M, S, null, CMMsg.MASK_ALWAYS|CMMsg.MASK_CNTRLMSG|CMMsg.MSG_ACTIVATE,null);
            if(locR.okMessage(M, activateMsg))
            {
                locR.send(M, activateMsg);
                if(S.activated())
                {
                    battery=S;
                    break;
                }
                else
                {
                    // Refused: rotate this battery to the end of the member list.
                    synchronized(this)
                    {
                        final LinkedList<WeakReference<Electronics>> rawSet=sets.get(key.toLowerCase());
                        if((rawSet!=null) && (rawSet.size()>0) && (rawSet.getLast().get() != S))
                        {
                            for(final Iterator<WeakReference<Electronics>> w=rawSet.iterator(); w.hasNext(); )
                            {
                                final WeakReference<Electronics> W=w.next();
                                if(W.get()==S)
                                {
                                    w.remove();
                                    break;
                                }
                            }
                            rawSet.addLast(new WeakReference<Electronics>(S));
                        }
                    }
                }
            }
        }
    }
    if(battery==null)
    {
        return false;
    }
    try
    {
        // Feed the panel from just this one battery; no generators involved.
        final List<ElecPanel> finalPanel=new XVector<ElecPanel>(E);
        final List<PowerSource> finalBatteries=new XVector<PowerSource>(battery);
        processElectricCurrents(emptyGeneratorList, finalBatteries, finalPanel);
        return true;
    }
    catch(final Exception e)
    {
        Log.errOut("GroundWired",e);
        return false;
    }
}
/**
 * Pushes one tick of electric current across every registered circuit.
 * The key set is snapshotted under the lock so per-circuit processing can
 * run without blocking registration/unregistration.
 */
protected void runElectricCurrents()
{
    setThreadStatus(serviceClient,"pushing electric currents");
    final List<String> circuitKeys;
    synchronized(this)
    {
        circuitKeys=new XVector<String>(sets.keySet());
    }
    for(final String circuitKey : circuitKeys)
        runElectricCurrent(circuitKey);
    setThreadStatus(serviceClient,"sleeping");
}
/**
 * Returns the shared fallback manufacturer ("DefaultManufacturer"),
 * creating it lazily on first use.
 * NOTE(review): the lazy init is unsynchronized; concurrent first calls
 * could each fetch an instance -- confirm CMClass.getCommon's semantics.
 */
@Override
public Manufacturer getDefaultManufacturer()
{
    if(defaultManufacturer==null)
        defaultManufacturer=(Manufacturer)CMClass.getCommon("DefaultManufacturer");
    return defaultManufacturer;
}
/**
 * Registers a manufacturer under its upper-cased, trimmed name and persists
 * the full manufacturer list.  Null and the default manufacturer are ignored.
 */
@Override
public void addManufacturer(Manufacturer manufacturer)
{
    if((manufacturer==null)||(manufacturer==defaultManufacturer))
        return;
    final String indexName=manufacturer.name().toUpperCase().trim();
    manufacturers.put(indexName, manufacturer);
    saveAllManufacturers();
}
/**
 * Deletes a manufacturer from the registry and persists the list.  The map
 * entry is only removed when it currently maps to this exact instance;
 * null and the default manufacturer are ignored.
 */
@Override
public void delManufacturer(Manufacturer manufacturer)
{
    if((manufacturer==null)||(manufacturer==defaultManufacturer))
        return;
    final String indexName=manufacturer.name().toUpperCase().trim();
    if(getManufacturer(manufacturer.name())==manufacturer)
        manufacturers.remove(indexName);
    saveAllManufacturers();
}
/**
 * Re-indexes a manufacturer whose name may have changed: locates the stale
 * map entry by identity, drops it, re-adds the manufacturer under its
 * current name, then persists the list.
 *
 * @param manufacturer the manufacturer to update; null/default are ignored
 */
@Override
public void updateManufacturer(Manufacturer manufacturer)
{
    if((manufacturer==null)||(manufacturer==defaultManufacturer)) return;
    final Manufacturer found=getManufacturer(manufacturer.name());
    if((found==null)||(found!=manufacturer))
    {
        // Name changed: find the old entry by identity and remove it.
        // The immediate break avoids iterating past the removal.
        for(final String manName : manufacturers.keySet())
            if(manufacturers.get(manName)==manufacturer)
            {
                manufacturers.remove(manName);
                break;
            }
        addManufacturer(manufacturer);
    }
    saveAllManufacturers();
}
/**
 * Looks up a manufacturer by name (case-insensitively, via the upper-cased
 * trimmed index).  The reserved pseudo-name "RANDOM" never resolves.
 *
 * @param name the manufacturer name, or null
 * @return the matching manufacturer, or null
 */
@Override
public Manufacturer getManufacturer(String name)
{
    if((name==null)||(name.equals("RANDOM")))
        return null;
    return manufacturers.get(name.toUpperCase().trim());
}
/**
 * Resolves the manufacturer for an item.  The pseudo-name "RANDOM" picks a
 * random manufacturer whose item mask accepts E and whose tech-level window
 * (relative to the global level) contains E's tech level, falling back to
 * the default manufacturer when none qualifies.
 *
 * @param E    the item being manufactured (needed for RANDOM), or null
 * @param name the manufacturer name or "RANDOM"
 * @return the resolved manufacturer, or null
 */
@Override
public Manufacturer getManufacturerOf(Electronics E, String name)
{
    if(name==null) return null;
    if(manufacturers.size()==0)
        return getDefaultManufacturer();
    if(name.equals("RANDOM"))
    {
        if(E==null)
            return null;
        // Candidates: those whose item mask accepts this item...
        final List<Manufacturer> subManufacturers=new ArrayList<Manufacturer>();
        for(final Manufacturer f : manufacturers.values())
            if(CMLib.masking().maskCheck(f.getItemMask(), E, true))
                subManufacturers.add(f);
        // ...whose tech-level window also contains the item's level.
        for(final Iterator<Manufacturer> f =subManufacturers.iterator();f.hasNext();)
        {
            final Manufacturer M=f.next();
            if((E.techLevel() < globalTechLevel+M.getMinTechLevelDiff())
            ||(E.techLevel() > globalTechLevel+M.getMaxTechLevelDiff()))
                f.remove();
        }
        if(subManufacturers.size()==0)
            return getDefaultManufacturer();
        return subManufacturers.get(CMLib.dice().roll(1, subManufacturers.size(), -1));
    }
    return manufacturers.get(name.toUpperCase().trim());
}
/**
 * Returns a read-only iterator over all registered manufacturers.
 * (The method name's spelling is fixed by the TechLibrary interface.)
 */
@Override
public Iterator<Manufacturer> manufacterers()
{
    return new ReadOnlyIterator<Manufacturer>(manufacturers.values().iterator());
}

/** Location of the persisted manufacturers list in the resources tree. */
protected String getManufacturersFilename()
{
    return "/resources/tech/manufacturers.xml";
}

/**
 * Persists all manufacturers (except the default one) as XML to the
 * manufacturers file, replacing any previous contents.
 */
protected synchronized void saveAllManufacturers()
{
    final String filename=getManufacturersFilename();
    CMFile xmlFile=new CMFile(filename, null, CMFile.FLAG_FORCEALLOW);
    if(!xmlFile.exists())
        xmlFile=new CMFile("::"+filename, null, CMFile.FLAG_FORCEALLOW); // "::" prefix: alternate (local) filesystem path
    final StringBuilder xmlStr=new StringBuilder("<MANUFACTURERS>");
    for(final Manufacturer man : manufacturers.values())
        if(man != defaultManufacturer)
            xmlStr.append("<MANUFACTURER>").append(man.getXml()).append("</MANUFACTURER>");
    xmlStr.append("</MANUFACTURERS>");
    xmlFile.saveText(xmlStr.toString());
}

/**
 * Rebuilds the manufacturers map from the manufacturers XML file, falling
 * back to the bundled examples file when none is readable.  Accepts both
 * bare MANUFACTURER elements and a MANUFACTURERS wrapper element.
 */
protected void loadAllManufacturers()
{
    final String filename=getManufacturersFilename();
    CMFile xmlFile=new CMFile(filename, null, CMFile.FLAG_FORCEALLOW);
    if((!xmlFile.exists())||(!xmlFile.canRead()))
        xmlFile=new CMFile("/resources/examples/manufacturers.xml", null, CMFile.FLAG_FORCEALLOW);
    manufacturers.clear();
    if(xmlFile.exists() && xmlFile.canRead())
    {
        final List<XMLLibrary.XMLpiece> xDoc=CMLib.xml().parseAllXML(xmlFile.text());
        final List<XMLLibrary.XMLpiece> xMans=new SLinkedList<XMLLibrary.XMLpiece>();
        for(final XMLLibrary.XMLpiece x : xDoc)
            if(x.tag.equalsIgnoreCase("MANUFACTURER"))
                xMans.add(x);
            else
            if(x.tag.equalsIgnoreCase("MANUFACTURERS"))
                xMans.addAll(x.contents);
        for(final XMLLibrary.XMLpiece x : xMans)
        {
            final Manufacturer man =(Manufacturer)CMClass.getCommon("DefaultManufacturer");
            man.setXml(x.value);
            addManufacturer(man); // also re-saves; acceptable at load time
        }
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.transaction.impl;
import javax.transaction.xa.Xid;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.core.io.IOCallback;
import org.apache.activemq.artemis.core.persistence.StorageManager;
import org.apache.activemq.artemis.core.server.ActiveMQServerLogger;
import org.apache.activemq.artemis.core.server.Queue;
import org.apache.activemq.artemis.core.server.impl.RefsOperation;
import org.apache.activemq.artemis.core.transaction.Transaction;
import org.apache.activemq.artemis.core.transaction.TransactionOperation;
/**
 * Standard {@link Transaction} implementation.
 * <p>
 * Tracks transaction state, an optional XA {@link Xid}, enlisted
 * {@link TransactionOperation}s and index-addressed properties.  State
 * transitions (prepare/commit/rollback) are serialized on {@code timeoutLock}
 * so they cannot race with timeout handling.
 */
public class TransactionImpl implements Transaction {

   // Lazily created on first addOperation()/getOperationsCount() call.
   private List<TransactionOperation> operations;

   private static final int INITIAL_NUM_PROPERTIES = 10;

   // Sparse, index-addressed property storage; grown on demand by putProperty().
   private Object[] properties = new Object[TransactionImpl.INITIAL_NUM_PROPERTIES];

   protected final StorageManager storageManager;

   // Null for non-XA transactions; prepare() rejects a null xid.
   private final Xid xid;

   private final long id;

   private volatile State state = State.ACTIVE;

   // Cause recorded by markAsRollbackOnly(); rethrown from prepare()/commit().
   private ActiveMQException exception;

   // Guards all state transitions below.
   private final Object timeoutLock = new Object();

   private final long createTime;

   private volatile boolean containsPersistent;

   // -1 means "use the default timeout passed to hasTimedOut()".
   private int timeoutSeconds = -1;

   public TransactionImpl(final StorageManager storageManager, final int timeoutSeconds) {
      this.storageManager = storageManager;
      xid = null;
      id = storageManager.generateID();
      createTime = System.currentTimeMillis();
      this.timeoutSeconds = timeoutSeconds;
   }

   public TransactionImpl(final StorageManager storageManager) {
      this.storageManager = storageManager;
      xid = null;
      id = storageManager.generateID();
      createTime = System.currentTimeMillis();
   }

   public TransactionImpl(final Xid xid, final StorageManager storageManager, final int timeoutSeconds) {
      this.storageManager = storageManager;
      this.xid = xid;
      id = storageManager.generateID();
      createTime = System.currentTimeMillis();
      this.timeoutSeconds = timeoutSeconds;
   }

   public TransactionImpl(final long id, final Xid xid, final StorageManager storageManager) {
      this.storageManager = storageManager;
      this.xid = xid;
      this.id = id;
      createTime = System.currentTimeMillis();
   }

   // Transaction implementation
   // -----------------------------------------------------------

   public void setContainsPersistent() {
      containsPersistent = true;
   }

   public boolean isContainsPersistent() {
      return containsPersistent;
   }

   public void setTimeout(final int timeout) {
      this.timeoutSeconds = timeout;
   }

   @Override
   public RefsOperation createRefsOperation(Queue queue) {
      return new RefsOperation(queue, storageManager);
   }

   public long getID() {
      return id;
   }

   public long getCreateTime() {
      return createTime;
   }

   /**
    * @param currentTime    current wall-clock time in milliseconds
    * @param defaultTimeout fallback timeout in seconds, used when no explicit
    *                       timeout was set on this transaction
    * @return true when the transaction is not PREPARED and has outlived its
    *         timeout
    */
   public boolean hasTimedOut(final long currentTime, final int defaultTimeout) {
      final int effectiveTimeout = timeoutSeconds == -1 ? defaultTimeout : timeoutSeconds;
      // multiply as long: 'seconds * 1000' in int arithmetic overflows for
      // timeouts >= ~25 days and would wrap negative
      return getState() != Transaction.State.PREPARED && currentTime > createTime + effectiveTimeout * 1000L;
   }

   /**
    * XA prepare.  Only valid for XA transactions in the ACTIVE state; a
    * ROLLBACK_ONLY transaction is rolled back here and its recorded cause is
    * rethrown.
    */
   public void prepare() throws Exception {
      storageManager.readLock();
      try {
         synchronized (timeoutLock) {
            if (state == State.ROLLBACK_ONLY) {
               if (exception != null) {
                  // this TX will never be rolled back,
                  // so we reset it now
                  beforeRollback();
                  afterRollback();
                  if (operations != null) {
                     operations.clear();
                  }
                  throw exception;
               }
               else {
                  // Do nothing
                  return;
               }
            }
            else if (state != State.ACTIVE) {
               throw new IllegalStateException("Transaction is in invalid state " + state);
            }
            if (xid == null) {
               throw new IllegalStateException("Cannot prepare non XA transaction");
            }
            beforePrepare();
            storageManager.prepare(id, xid);
            state = State.PREPARED;
            // We use the Callback even for non persistence
            // If we are using non-persistence with replication, the replication manager will have
            // to execute this runnable in the correct order
            storageManager.afterCompleteOperations(new IOCallback() {
               public void onError(final int errorCode, final String errorMessage) {
                  ActiveMQServerLogger.LOGGER.ioErrorOnTX(errorCode, errorMessage);
               }

               public void done() {
                  afterPrepare();
               }
            });
         }
      }
      finally {
         storageManager.readUnLock();
      }
   }

   public void commit() throws Exception {
      commit(true);
   }

   /**
    * Commits the transaction.  One-phase commit requires ACTIVE state;
    * two-phase (XA) commit requires PREPARED.  A ROLLBACK_ONLY transaction is
    * rolled back instead and its recorded cause rethrown.
    */
   public void commit(final boolean onePhase) throws Exception {
      synchronized (timeoutLock) {
         if (state == State.ROLLBACK_ONLY) {
            rollback();
            if (exception != null) {
               throw exception;
            }
            else {
               // Do nothing
               return;
            }
         }
         if (xid != null) {
            if (onePhase && state != State.ACTIVE || !onePhase && state != State.PREPARED) {
               throw new IllegalStateException("Transaction is in invalid state " + state);
            }
         }
         else {
            if (state != State.ACTIVE) {
               throw new IllegalStateException("Transaction is in invalid state " + state);
            }
         }
         beforeCommit();
         doCommit();
         // We use the Callback even for non persistence
         // If we are using non-persistence with replication, the replication manager will have
         // to execute this runnable in the correct order
         // This also will only use a different thread if there are any IO pending.
         // If the IO finished early by the time we got here, we won't need an executor
         storageManager.afterCompleteOperations(new IOCallback() {
            public void onError(final int errorCode, final String errorMessage) {
               ActiveMQServerLogger.LOGGER.ioErrorOnTX(errorCode, errorMessage);
            }

            public void done() {
               afterCommit();
            }
         });
      }
   }

   /**
    * Writes the commit record when the TX touched persistent state (or is a
    * prepared XA branch) and moves to COMMITTED.
    *
    * @throws Exception propagated from the storage manager
    */
   protected void doCommit() throws Exception {
      if (containsPersistent || xid != null && state == State.PREPARED) {
         storageManager.commit(id);
         state = State.COMMITTED;
      }
   }

   /**
    * Rolls the transaction back.  Valid from ACTIVE, ROLLBACK_ONLY and (for
    * XA) PREPARED states.
    */
   public void rollback() throws Exception {
      synchronized (timeoutLock) {
         if (xid != null) {
            if (state != State.PREPARED && state != State.ACTIVE && state != State.ROLLBACK_ONLY) {
               throw new IllegalStateException("Transaction is in invalid state " + state);
            }
         }
         else {
            if (state != State.ACTIVE && state != State.ROLLBACK_ONLY) {
               throw new IllegalStateException("Transaction is in invalid state " + state);
            }
         }
         beforeRollback();
         doRollback();
         state = State.ROLLEDBACK;
         // We use the Callback even for non persistence
         // If we are using non-persistence with replication, the replication manager will have
         // to execute this runnable in the correct order
         storageManager.afterCompleteOperations(new IOCallback() {
            public void onError(final int errorCode, final String errorMessage) {
               ActiveMQServerLogger.LOGGER.ioErrorOnTX(errorCode, errorMessage);
            }

            public void done() {
               afterRollback();
            }
         });
      }
   }

   public void suspend() {
      if (state != State.ACTIVE) {
         throw new IllegalStateException("Can only suspend active transaction");
      }
      state = State.SUSPENDED;
   }

   public void resume() {
      if (state != State.SUSPENDED) {
         throw new IllegalStateException("Can only resume a suspended transaction");
      }
      state = State.ACTIVE;
   }

   public Transaction.State getState() {
      return state;
   }

   public void setState(final State state) {
      this.state = state;
   }

   public Xid getXid() {
      return xid;
   }

   /**
    * Flags this transaction so any later prepare/commit attempt rolls it back
    * and rethrows {@code exception1} as the cause.
    */
   public void markAsRollbackOnly(final ActiveMQException exception1) {
      if (ActiveMQServerLogger.LOGGER.isDebugEnabled()) {
         ActiveMQServerLogger.LOGGER.debug("Marking Transaction " + this.id + " as rollback only");
      }
      state = State.ROLLBACK_ONLY;
      this.exception = exception1;
   }

   public synchronized void addOperation(final TransactionOperation operation) {
      checkCreateOperations();
      operations.add(operation);
   }

   private int getOperationsCount() {
      // read-only: don't allocate the operations list just to report zero
      // (previously this mutated state as a side effect of toString())
      return operations == null ? 0 : operations.size();
   }

   /** @return a defensive copy of the enlisted operations (empty when none). */
   public synchronized List<TransactionOperation> getAllOperations() {
      if (operations == null) {
         // previously threw NullPointerException when no operation had ever
         // been added
         return new ArrayList<TransactionOperation>();
      }
      return new ArrayList<TransactionOperation>(operations);
   }

   /** Stores {@code property} at {@code index}, growing the array as needed. */
   public void putProperty(final int index, final Object property) {
      if (index >= properties.length) {
         // must grow to at least index + 1; growing to exactly 'index' (as the
         // previous code did) left properties[index] out of bounds and threw
         // ArrayIndexOutOfBoundsException on the assignment below
         Object[] newProperties = new Object[index + 1];
         System.arraycopy(properties, 0, newProperties, 0, properties.length);
         properties = newProperties;
      }
      properties[index] = property;
   }

   public Object getProperty(final int index) {
      return properties[index];
   }

   // Private
   // -------------------------------------------------------------------

   private void doRollback() throws Exception {
      if (containsPersistent || xid != null && state == State.PREPARED) {
         storageManager.rollback(id);
      }
   }

   private void checkCreateOperations() {
      if (operations == null) {
         operations = new ArrayList<TransactionOperation>();
      }
   }

   private synchronized void afterCommit() {
      if (operations != null) {
         for (TransactionOperation operation : operations) {
            operation.afterCommit(this);
         }
      }
   }

   private synchronized void afterRollback() {
      if (operations != null) {
         for (TransactionOperation operation : operations) {
            operation.afterRollback(this);
         }
      }
   }

   private synchronized void beforeCommit() throws Exception {
      if (operations != null) {
         for (TransactionOperation operation : operations) {
            operation.beforeCommit(this);
         }
      }
   }

   private synchronized void beforePrepare() throws Exception {
      if (operations != null) {
         for (TransactionOperation operation : operations) {
            operation.beforePrepare(this);
         }
      }
   }

   private synchronized void beforeRollback() throws Exception {
      if (operations != null) {
         for (TransactionOperation operation : operations) {
            operation.beforeRollback(this);
         }
      }
   }

   private synchronized void afterPrepare() {
      if (operations != null) {
         for (TransactionOperation operation : operations) {
            operation.afterPrepare(this);
         }
      }
   }

   @Override
   public String toString() {
      Date dt = new Date(this.createTime);
      return "TransactionImpl [xid=" + xid +
         ", id=" +
         id +
         ", state=" +
         state +
         ", createTime=" +
         createTime + "(" + dt + ")" +
         ", timeoutSeconds=" +
         timeoutSeconds +
         ", nr operations = " + getOperationsCount() +
         "]@" +
         Integer.toHexString(hashCode());
   }
}
| |
/*
* Copyright 2013 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.buffer;
import io.netty.util.ByteProcessor;
import io.netty.util.ResourceLeak;
import io.netty.util.internal.SystemPropertyUtil;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.FileChannel;
import java.nio.channels.GatheringByteChannel;
import java.nio.channels.ScatteringByteChannel;
import java.nio.charset.Charset;
/**
 * A {@link WrappedByteBuf} used by the leak detector in ADVANCED/PARANOID
 * mode: every operation on the buffer is recorded into the associated
 * {@link ResourceLeak} so that, when a leak is reported, the record shows
 * where the buffer was last touched.  Non-ref-counting operations can be
 * excluded from recording via the
 * {@code io.netty.leakDetection.acquireAndReleaseOnly} system property.
 */
final class AdvancedLeakAwareByteBuf extends WrappedByteBuf {
    private static final String PROP_ACQUIRE_AND_RELEASE_ONLY = "io.netty.leakDetection.acquireAndReleaseOnly";
    private static final boolean ACQUIRE_AND_RELEASE_ONLY;
    private static final InternalLogger logger = InternalLoggerFactory.getInstance(AdvancedLeakAwareByteBuf.class);
    static {
        // Read once at class load; when true, only retain/release/touch are recorded.
        ACQUIRE_AND_RELEASE_ONLY = SystemPropertyUtil.getBoolean(PROP_ACQUIRE_AND_RELEASE_ONLY, false);
        if (logger.isDebugEnabled()) {
            logger.debug("-D{}: {}", PROP_ACQUIRE_AND_RELEASE_ONLY, ACQUIRE_AND_RELEASE_ONLY);
        }
    }
    // Leak record shared by this buffer and every derived view created below.
    private final ResourceLeak leak;
    AdvancedLeakAwareByteBuf(ByteBuf buf, ResourceLeak leak) {
        super(buf);
        this.leak = leak;
    }
    // Records an access for non-ref-counting operations, unless suppressed by
    // the acquireAndReleaseOnly property.
    static void recordLeakNonRefCountingOperation(ResourceLeak leak) {
        if (!ACQUIRE_AND_RELEASE_ONLY) {
            leak.record();
        }
    }

    // ---- derived views: each is re-wrapped so accesses through the view are
    // ---- recorded against the same leak record
    @Override
    public ByteBuf order(ByteOrder endianness) {
        recordLeakNonRefCountingOperation(leak);
        if (order() == endianness) {
            return this;
        } else {
            return new AdvancedLeakAwareByteBuf(super.order(endianness), leak);
        }
    }
    @Override
    public ByteBuf slice() {
        recordLeakNonRefCountingOperation(leak);
        return new AdvancedLeakAwareByteBuf(super.slice(), leak);
    }
    @Override
    public ByteBuf slice(int index, int length) {
        recordLeakNonRefCountingOperation(leak);
        return new AdvancedLeakAwareByteBuf(super.slice(index, length), leak);
    }
    @Override
    public ByteBuf duplicate() {
        recordLeakNonRefCountingOperation(leak);
        return new AdvancedLeakAwareByteBuf(super.duplicate(), leak);
    }
    @Override
    public ByteBuf readSlice(int length) {
        recordLeakNonRefCountingOperation(leak);
        return new AdvancedLeakAwareByteBuf(super.readSlice(length), leak);
    }

    // ---- everything below is a record-then-delegate forward to the wrapped buffer
    @Override
    public ByteBuf discardReadBytes() {
        recordLeakNonRefCountingOperation(leak);
        return super.discardReadBytes();
    }
    @Override
    public ByteBuf discardSomeReadBytes() {
        recordLeakNonRefCountingOperation(leak);
        return super.discardSomeReadBytes();
    }
    @Override
    public ByteBuf ensureWritable(int minWritableBytes) {
        recordLeakNonRefCountingOperation(leak);
        return super.ensureWritable(minWritableBytes);
    }
    @Override
    public int ensureWritable(int minWritableBytes, boolean force) {
        recordLeakNonRefCountingOperation(leak);
        return super.ensureWritable(minWritableBytes, force);
    }
    @Override
    public boolean getBoolean(int index) {
        recordLeakNonRefCountingOperation(leak);
        return super.getBoolean(index);
    }
    @Override
    public byte getByte(int index) {
        recordLeakNonRefCountingOperation(leak);
        return super.getByte(index);
    }
    @Override
    public short getUnsignedByte(int index) {
        recordLeakNonRefCountingOperation(leak);
        return super.getUnsignedByte(index);
    }
    @Override
    public short getShort(int index) {
        recordLeakNonRefCountingOperation(leak);
        return super.getShort(index);
    }
    @Override
    public int getUnsignedShort(int index) {
        recordLeakNonRefCountingOperation(leak);
        return super.getUnsignedShort(index);
    }
    @Override
    public int getMedium(int index) {
        recordLeakNonRefCountingOperation(leak);
        return super.getMedium(index);
    }
    @Override
    public int getUnsignedMedium(int index) {
        recordLeakNonRefCountingOperation(leak);
        return super.getUnsignedMedium(index);
    }
    @Override
    public int getInt(int index) {
        recordLeakNonRefCountingOperation(leak);
        return super.getInt(index);
    }
    @Override
    public long getUnsignedInt(int index) {
        recordLeakNonRefCountingOperation(leak);
        return super.getUnsignedInt(index);
    }
    @Override
    public long getLong(int index) {
        recordLeakNonRefCountingOperation(leak);
        return super.getLong(index);
    }
    @Override
    public char getChar(int index) {
        recordLeakNonRefCountingOperation(leak);
        return super.getChar(index);
    }
    @Override
    public float getFloat(int index) {
        recordLeakNonRefCountingOperation(leak);
        return super.getFloat(index);
    }
    @Override
    public double getDouble(int index) {
        recordLeakNonRefCountingOperation(leak);
        return super.getDouble(index);
    }
    @Override
    public ByteBuf getBytes(int index, ByteBuf dst) {
        recordLeakNonRefCountingOperation(leak);
        return super.getBytes(index, dst);
    }
    @Override
    public ByteBuf getBytes(int index, ByteBuf dst, int length) {
        recordLeakNonRefCountingOperation(leak);
        return super.getBytes(index, dst, length);
    }
    @Override
    public ByteBuf getBytes(int index, ByteBuf dst, int dstIndex, int length) {
        recordLeakNonRefCountingOperation(leak);
        return super.getBytes(index, dst, dstIndex, length);
    }
    @Override
    public ByteBuf getBytes(int index, byte[] dst) {
        recordLeakNonRefCountingOperation(leak);
        return super.getBytes(index, dst);
    }
    @Override
    public ByteBuf getBytes(int index, byte[] dst, int dstIndex, int length) {
        recordLeakNonRefCountingOperation(leak);
        return super.getBytes(index, dst, dstIndex, length);
    }
    @Override
    public ByteBuf getBytes(int index, ByteBuffer dst) {
        recordLeakNonRefCountingOperation(leak);
        return super.getBytes(index, dst);
    }
    @Override
    public ByteBuf getBytes(int index, OutputStream out, int length) throws IOException {
        recordLeakNonRefCountingOperation(leak);
        return super.getBytes(index, out, length);
    }
    @Override
    public int getBytes(int index, GatheringByteChannel out, int length) throws IOException {
        recordLeakNonRefCountingOperation(leak);
        return super.getBytes(index, out, length);
    }
    @Override
    public ByteBuf setBoolean(int index, boolean value) {
        recordLeakNonRefCountingOperation(leak);
        return super.setBoolean(index, value);
    }
    @Override
    public ByteBuf setByte(int index, int value) {
        recordLeakNonRefCountingOperation(leak);
        return super.setByte(index, value);
    }
    @Override
    public ByteBuf setShort(int index, int value) {
        recordLeakNonRefCountingOperation(leak);
        return super.setShort(index, value);
    }
    @Override
    public ByteBuf setMedium(int index, int value) {
        recordLeakNonRefCountingOperation(leak);
        return super.setMedium(index, value);
    }
    @Override
    public ByteBuf setInt(int index, int value) {
        recordLeakNonRefCountingOperation(leak);
        return super.setInt(index, value);
    }
    @Override
    public ByteBuf setLong(int index, long value) {
        recordLeakNonRefCountingOperation(leak);
        return super.setLong(index, value);
    }
    @Override
    public ByteBuf setChar(int index, int value) {
        recordLeakNonRefCountingOperation(leak);
        return super.setChar(index, value);
    }
    @Override
    public ByteBuf setFloat(int index, float value) {
        recordLeakNonRefCountingOperation(leak);
        return super.setFloat(index, value);
    }
    @Override
    public ByteBuf setDouble(int index, double value) {
        recordLeakNonRefCountingOperation(leak);
        return super.setDouble(index, value);
    }
    @Override
    public ByteBuf setBytes(int index, ByteBuf src) {
        recordLeakNonRefCountingOperation(leak);
        return super.setBytes(index, src);
    }
    @Override
    public ByteBuf setBytes(int index, ByteBuf src, int length) {
        recordLeakNonRefCountingOperation(leak);
        return super.setBytes(index, src, length);
    }
    @Override
    public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) {
        recordLeakNonRefCountingOperation(leak);
        return super.setBytes(index, src, srcIndex, length);
    }
    @Override
    public ByteBuf setBytes(int index, byte[] src) {
        recordLeakNonRefCountingOperation(leak);
        return super.setBytes(index, src);
    }
    @Override
    public ByteBuf setBytes(int index, byte[] src, int srcIndex, int length) {
        recordLeakNonRefCountingOperation(leak);
        return super.setBytes(index, src, srcIndex, length);
    }
    @Override
    public ByteBuf setBytes(int index, ByteBuffer src) {
        recordLeakNonRefCountingOperation(leak);
        return super.setBytes(index, src);
    }
    @Override
    public int setBytes(int index, InputStream in, int length) throws IOException {
        recordLeakNonRefCountingOperation(leak);
        return super.setBytes(index, in, length);
    }
    @Override
    public int setBytes(int index, ScatteringByteChannel in, int length) throws IOException {
        recordLeakNonRefCountingOperation(leak);
        return super.setBytes(index, in, length);
    }
    @Override
    public ByteBuf setZero(int index, int length) {
        recordLeakNonRefCountingOperation(leak);
        return super.setZero(index, length);
    }
    @Override
    public boolean readBoolean() {
        recordLeakNonRefCountingOperation(leak);
        return super.readBoolean();
    }
    @Override
    public byte readByte() {
        recordLeakNonRefCountingOperation(leak);
        return super.readByte();
    }
    @Override
    public short readUnsignedByte() {
        recordLeakNonRefCountingOperation(leak);
        return super.readUnsignedByte();
    }
    @Override
    public short readShort() {
        recordLeakNonRefCountingOperation(leak);
        return super.readShort();
    }
    @Override
    public int readUnsignedShort() {
        recordLeakNonRefCountingOperation(leak);
        return super.readUnsignedShort();
    }
    @Override
    public int readMedium() {
        recordLeakNonRefCountingOperation(leak);
        return super.readMedium();
    }
    @Override
    public int readUnsignedMedium() {
        recordLeakNonRefCountingOperation(leak);
        return super.readUnsignedMedium();
    }
    @Override
    public int readInt() {
        recordLeakNonRefCountingOperation(leak);
        return super.readInt();
    }
    @Override
    public long readUnsignedInt() {
        recordLeakNonRefCountingOperation(leak);
        return super.readUnsignedInt();
    }
    @Override
    public long readLong() {
        recordLeakNonRefCountingOperation(leak);
        return super.readLong();
    }
    @Override
    public char readChar() {
        recordLeakNonRefCountingOperation(leak);
        return super.readChar();
    }
    @Override
    public float readFloat() {
        recordLeakNonRefCountingOperation(leak);
        return super.readFloat();
    }
    @Override
    public double readDouble() {
        recordLeakNonRefCountingOperation(leak);
        return super.readDouble();
    }
    @Override
    public ByteBuf readBytes(int length) {
        recordLeakNonRefCountingOperation(leak);
        return super.readBytes(length);
    }
    @Override
    public ByteBuf readBytes(ByteBuf dst) {
        recordLeakNonRefCountingOperation(leak);
        return super.readBytes(dst);
    }
    @Override
    public ByteBuf readBytes(ByteBuf dst, int length) {
        recordLeakNonRefCountingOperation(leak);
        return super.readBytes(dst, length);
    }
    @Override
    public ByteBuf readBytes(ByteBuf dst, int dstIndex, int length) {
        recordLeakNonRefCountingOperation(leak);
        return super.readBytes(dst, dstIndex, length);
    }
    @Override
    public ByteBuf readBytes(byte[] dst) {
        recordLeakNonRefCountingOperation(leak);
        return super.readBytes(dst);
    }
    @Override
    public ByteBuf readBytes(byte[] dst, int dstIndex, int length) {
        recordLeakNonRefCountingOperation(leak);
        return super.readBytes(dst, dstIndex, length);
    }
    @Override
    public ByteBuf readBytes(ByteBuffer dst) {
        recordLeakNonRefCountingOperation(leak);
        return super.readBytes(dst);
    }
    @Override
    public ByteBuf readBytes(OutputStream out, int length) throws IOException {
        recordLeakNonRefCountingOperation(leak);
        return super.readBytes(out, length);
    }
    @Override
    public int readBytes(GatheringByteChannel out, int length) throws IOException {
        recordLeakNonRefCountingOperation(leak);
        return super.readBytes(out, length);
    }
    @Override
    public ByteBuf skipBytes(int length) {
        recordLeakNonRefCountingOperation(leak);
        return super.skipBytes(length);
    }
    @Override
    public ByteBuf writeBoolean(boolean value) {
        recordLeakNonRefCountingOperation(leak);
        return super.writeBoolean(value);
    }
    @Override
    public ByteBuf writeByte(int value) {
        recordLeakNonRefCountingOperation(leak);
        return super.writeByte(value);
    }
    @Override
    public ByteBuf writeShort(int value) {
        recordLeakNonRefCountingOperation(leak);
        return super.writeShort(value);
    }
    @Override
    public ByteBuf writeMedium(int value) {
        recordLeakNonRefCountingOperation(leak);
        return super.writeMedium(value);
    }
    @Override
    public ByteBuf writeInt(int value) {
        recordLeakNonRefCountingOperation(leak);
        return super.writeInt(value);
    }
    @Override
    public ByteBuf writeLong(long value) {
        recordLeakNonRefCountingOperation(leak);
        return super.writeLong(value);
    }
    @Override
    public ByteBuf writeChar(int value) {
        recordLeakNonRefCountingOperation(leak);
        return super.writeChar(value);
    }
    @Override
    public ByteBuf writeFloat(float value) {
        recordLeakNonRefCountingOperation(leak);
        return super.writeFloat(value);
    }
    @Override
    public ByteBuf writeDouble(double value) {
        recordLeakNonRefCountingOperation(leak);
        return super.writeDouble(value);
    }
    @Override
    public ByteBuf writeBytes(ByteBuf src) {
        recordLeakNonRefCountingOperation(leak);
        return super.writeBytes(src);
    }
    @Override
    public ByteBuf writeBytes(ByteBuf src, int length) {
        recordLeakNonRefCountingOperation(leak);
        return super.writeBytes(src, length);
    }
    @Override
    public ByteBuf writeBytes(ByteBuf src, int srcIndex, int length) {
        recordLeakNonRefCountingOperation(leak);
        return super.writeBytes(src, srcIndex, length);
    }
    @Override
    public ByteBuf writeBytes(byte[] src) {
        recordLeakNonRefCountingOperation(leak);
        return super.writeBytes(src);
    }
    @Override
    public ByteBuf writeBytes(byte[] src, int srcIndex, int length) {
        recordLeakNonRefCountingOperation(leak);
        return super.writeBytes(src, srcIndex, length);
    }
    @Override
    public ByteBuf writeBytes(ByteBuffer src) {
        recordLeakNonRefCountingOperation(leak);
        return super.writeBytes(src);
    }
    @Override
    public int writeBytes(InputStream in, int length) throws IOException {
        recordLeakNonRefCountingOperation(leak);
        return super.writeBytes(in, length);
    }
    @Override
    public int writeBytes(ScatteringByteChannel in, int length) throws IOException {
        recordLeakNonRefCountingOperation(leak);
        return super.writeBytes(in, length);
    }
    @Override
    public ByteBuf writeZero(int length) {
        recordLeakNonRefCountingOperation(leak);
        return super.writeZero(length);
    }
    @Override
    public int indexOf(int fromIndex, int toIndex, byte value) {
        recordLeakNonRefCountingOperation(leak);
        return super.indexOf(fromIndex, toIndex, value);
    }
    @Override
    public int bytesBefore(byte value) {
        recordLeakNonRefCountingOperation(leak);
        return super.bytesBefore(value);
    }
    @Override
    public int bytesBefore(int length, byte value) {
        recordLeakNonRefCountingOperation(leak);
        return super.bytesBefore(length, value);
    }
    @Override
    public int bytesBefore(int index, int length, byte value) {
        recordLeakNonRefCountingOperation(leak);
        return super.bytesBefore(index, length, value);
    }
    @Override
    public int forEachByte(ByteProcessor processor) {
        recordLeakNonRefCountingOperation(leak);
        return super.forEachByte(processor);
    }
    @Override
    public int forEachByte(int index, int length, ByteProcessor processor) {
        recordLeakNonRefCountingOperation(leak);
        return super.forEachByte(index, length, processor);
    }
    @Override
    public int forEachByteDesc(ByteProcessor processor) {
        recordLeakNonRefCountingOperation(leak);
        return super.forEachByteDesc(processor);
    }
    @Override
    public int forEachByteDesc(int index, int length, ByteProcessor processor) {
        recordLeakNonRefCountingOperation(leak);
        return super.forEachByteDesc(index, length, processor);
    }
    @Override
    public ByteBuf copy() {
        recordLeakNonRefCountingOperation(leak);
        return super.copy();
    }
    @Override
    public ByteBuf copy(int index, int length) {
        recordLeakNonRefCountingOperation(leak);
        return super.copy(index, length);
    }
    @Override
    public int nioBufferCount() {
        recordLeakNonRefCountingOperation(leak);
        return super.nioBufferCount();
    }
    @Override
    public ByteBuffer nioBuffer() {
        recordLeakNonRefCountingOperation(leak);
        return super.nioBuffer();
    }
    @Override
    public ByteBuffer nioBuffer(int index, int length) {
        recordLeakNonRefCountingOperation(leak);
        return super.nioBuffer(index, length);
    }
    @Override
    public ByteBuffer[] nioBuffers() {
        recordLeakNonRefCountingOperation(leak);
        return super.nioBuffers();
    }
    @Override
    public ByteBuffer[] nioBuffers(int index, int length) {
        recordLeakNonRefCountingOperation(leak);
        return super.nioBuffers(index, length);
    }
    @Override
    public ByteBuffer internalNioBuffer(int index, int length) {
        recordLeakNonRefCountingOperation(leak);
        return super.internalNioBuffer(index, length);
    }
    @Override
    public String toString(Charset charset) {
        recordLeakNonRefCountingOperation(leak);
        return super.toString(charset);
    }
    @Override
    public String toString(int index, int length, Charset charset) {
        recordLeakNonRefCountingOperation(leak);
        return super.toString(index, length, charset);
    }
    @Override
    public ByteBuf capacity(int newCapacity) {
        recordLeakNonRefCountingOperation(leak);
        return super.capacity(newCapacity);
    }
    @Override
    public short getShortLE(int index) {
        recordLeakNonRefCountingOperation(leak);
        return super.getShortLE(index);
    }
    @Override
    public int getUnsignedShortLE(int index) {
        recordLeakNonRefCountingOperation(leak);
        return super.getUnsignedShortLE(index);
    }
    @Override
    public int getMediumLE(int index) {
        recordLeakNonRefCountingOperation(leak);
        return super.getMediumLE(index);
    }
    @Override
    public int getUnsignedMediumLE(int index) {
        recordLeakNonRefCountingOperation(leak);
        return super.getUnsignedMediumLE(index);
    }
    @Override
    public int getIntLE(int index) {
        recordLeakNonRefCountingOperation(leak);
        return super.getIntLE(index);
    }
    @Override
    public long getUnsignedIntLE(int index) {
        recordLeakNonRefCountingOperation(leak);
        return super.getUnsignedIntLE(index);
    }
    @Override
    public long getLongLE(int index) {
        recordLeakNonRefCountingOperation(leak);
        return super.getLongLE(index);
    }
    @Override
    public ByteBuf setShortLE(int index, int value) {
        recordLeakNonRefCountingOperation(leak);
        return super.setShortLE(index, value);
    }
    @Override
    public ByteBuf setIntLE(int index, int value) {
        recordLeakNonRefCountingOperation(leak);
        return super.setIntLE(index, value);
    }
    @Override
    public ByteBuf setMediumLE(int index, int value) {
        recordLeakNonRefCountingOperation(leak);
        return super.setMediumLE(index, value);
    }
    @Override
    public ByteBuf setLongLE(int index, long value) {
        recordLeakNonRefCountingOperation(leak);
        return super.setLongLE(index, value);
    }
    @Override
    public short readShortLE() {
        recordLeakNonRefCountingOperation(leak);
        return super.readShortLE();
    }
    @Override
    public int readUnsignedShortLE() {
        recordLeakNonRefCountingOperation(leak);
        return super.readUnsignedShortLE();
    }
    @Override
    public int readMediumLE() {
        recordLeakNonRefCountingOperation(leak);
        return super.readMediumLE();
    }
    @Override
    public int readUnsignedMediumLE() {
        recordLeakNonRefCountingOperation(leak);
        return super.readUnsignedMediumLE();
    }
    @Override
    public int readIntLE() {
        recordLeakNonRefCountingOperation(leak);
        return super.readIntLE();
    }
    @Override
    public long readUnsignedIntLE() {
        recordLeakNonRefCountingOperation(leak);
        return super.readUnsignedIntLE();
    }
    @Override
    public long readLongLE() {
        recordLeakNonRefCountingOperation(leak);
        return super.readLongLE();
    }
    @Override
    public ByteBuf writeShortLE(int value) {
        recordLeakNonRefCountingOperation(leak);
        return super.writeShortLE(value);
    }
    @Override
    public ByteBuf writeMediumLE(int value) {
        recordLeakNonRefCountingOperation(leak);
        return super.writeMediumLE(value);
    }
    @Override
    public ByteBuf writeIntLE(int value) {
        recordLeakNonRefCountingOperation(leak);
        return super.writeIntLE(value);
    }
    @Override
    public ByteBuf writeLongLE(long value) {
        recordLeakNonRefCountingOperation(leak);
        return super.writeLongLE(value);
    }
    @Override
    public int getBytes(int index, FileChannel out, long position, int length) throws IOException {
        recordLeakNonRefCountingOperation(leak);
        return super.getBytes(index, out, position, length);
    }
    @Override
    public int setBytes(int index, FileChannel in, long position, int length) throws IOException {
        recordLeakNonRefCountingOperation(leak);
        return super.setBytes(index, in, position, length);
    }
    @Override
    public int readBytes(FileChannel out, long position, int length) throws IOException {
        recordLeakNonRefCountingOperation(leak);
        return super.readBytes(out, position, length);
    }
    @Override
    public int writeBytes(FileChannel in, long position, int length) throws IOException {
        recordLeakNonRefCountingOperation(leak);
        return super.writeBytes(in, position, length);
    }

    // ---- ref-counting operations: always recorded, regardless of the
    // ---- acquireAndReleaseOnly setting
    @Override
    public ByteBuf retain() {
        leak.record();
        return super.retain();
    }
    @Override
    public ByteBuf retain(int increment) {
        leak.record();
        return super.retain(increment);
    }
    @Override
    public ByteBuf touch() {
        leak.record();
        return this;
    }
    @Override
    public ByteBuf touch(Object hint) {
        leak.record(hint);
        return this;
    }
    @Override
    public boolean release() {
        boolean deallocated = super.release();
        if (deallocated) {
            // buffer fully released: close the record so no leak is reported
            leak.close();
        } else {
            leak.record();
        }
        return deallocated;
    }
    @Override
    public boolean release(int decrement) {
        boolean deallocated = super.release(decrement);
        if (deallocated) {
            leak.close();
        } else {
            leak.record();
        }
        return deallocated;
    }
}
| |
/*
* Copyright 2008-2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.entropysoft.transmorph.signature.parser;
import java.util.ArrayList;
import java.util.List;
import net.entropysoft.transmorph.signature.ArrayTypeSignature;
import net.entropysoft.transmorph.signature.ClassTypeSignature;
import net.entropysoft.transmorph.signature.FieldTypeSignature;
import net.entropysoft.transmorph.signature.PrimitiveTypeSignature;
import net.entropysoft.transmorph.signature.TypeArgSignature;
import net.entropysoft.transmorph.signature.FullTypeSignature;
import net.entropysoft.transmorph.signature.parser.JavaSyntaxTypeSignatureLexer.Token;
import net.entropysoft.transmorph.signature.parser.JavaSyntaxTypeSignatureLexer.TokenType;
/**
* Parser for java type signatures
*
* @author Cedric Chabanois (cchabanois at gmail.com)
*
*/
/**
 * Parser for java type signatures written in java syntax, e.g.
 * {@code java.util.Map<String,Integer>[]}. Produces {@link FullTypeSignature}
 * trees; short class names are resolved through the configured
 * {@link IImportedClassesProvider}s (by default {@code java.lang} imports).
 *
 * @author Cedric Chabanois (cchabanois at gmail.com)
 */
public class JavaSyntaxTypeSignatureParser implements ITypeSignatureParser {
    private JavaSyntaxTypeSignatureLexer lexer;
    private boolean acceptGenerics = true;
    private IImportedClassesProvider[] importedClassesProviders = { new JavaLangImportedClassesProvider() };

    public JavaSyntaxTypeSignatureParser() {
    }

    public JavaSyntaxTypeSignatureParser(String typeSignature) {
        setTypeSignature(typeSignature);
    }

    /**
     * Parses the configured signature to completion.
     *
     * @return the parsed signature
     * @throws InvalidSignatureException if the input is not a valid signature
     *         or has trailing tokens
     */
    public FullTypeSignature parseTypeSignature() throws InvalidSignatureException {
        FullTypeSignature result = parseJavaTypeSignature();
        // The whole input must have been consumed; leftovers are an error.
        lexer.nextToken(TokenType.END_OF_TOKENS);
        return result;
    }

    public void setTypeSignature(String signature) {
        lexer = new JavaSyntaxTypeSignatureLexer(signature);
    }

    public boolean isAcceptGenerics() {
        return acceptGenerics;
    }

    public void setAcceptGenerics(boolean acceptGenerics) {
        this.acceptGenerics = acceptGenerics;
    }

    public IImportedClassesProvider[] getImportedClasses() {
        return importedClassesProviders;
    }

    public void setImportedClassesProviders(IImportedClassesProvider... importedClasses) {
        this.importedClassesProviders = importedClasses;
    }

    /**
     * Parses one complete type: a primitive or class type followed by any
     * number of {@code []} suffixes.
     */
    public FullTypeSignature parseJavaTypeSignature() {
        FullTypeSignature signature = parsePrimitiveTypeSignature();
        if (signature == null) {
            signature = parseClassTypeSignature();
        }
        // Each trailing array token wraps the signature in one more array level.
        for (Token next = lexer.peekToken(0); next.tokenType == TokenType.ARRAY; next = lexer.peekToken(0)) {
            signature = new ArrayTypeSignature(signature);
            lexer.nextToken();
        }
        return signature;
    }

    /**
     * Parses a type and requires it to be a field type signature (i.e. not a
     * bare primitive).
     *
     * @throws InvalidSignatureException if the parsed type is not a field type
     */
    public FieldTypeSignature parseFieldTypeSignature() {
        FullTypeSignature signature = parseJavaTypeSignature();
        if (!(signature instanceof FieldTypeSignature)) {
            throw new InvalidSignatureException("Invalid signature", lexer.peekToken(0).tokenStart);
        }
        return (FieldTypeSignature) signature;
    }

    /**
     * Tries to parse a primitive type. Returns {@code null} (without consuming
     * anything) when the next token is not a primitive, or when the token
     * after it shows it is actually the start of a longer name.
     */
    private PrimitiveTypeSignature parsePrimitiveTypeSignature() {
        Character primitiveChar = PrimitiveTypeUtils.getChar(lexer.peekToken(0).text);
        if (primitiveChar == null) {
            return null;
        }
        TokenType following = lexer.peekToken(1).tokenType;
        if (following != TokenType.END_OF_TOKENS && following != TokenType.ARRAY) {
            return null;
        }
        lexer.nextToken();
        return new PrimitiveTypeSignature(primitiveChar);
    }

    /**
     * Parses a (possibly generic, possibly nested) class type signature.
     */
    private ClassTypeSignature parseClassTypeSignature() {
        String outerClassName = parseOuterClassName();
        TypeArgSignature[] typeArgs = new TypeArgSignature[0];
        if (acceptGenerics && lexer.peekToken(0).tokenType == TokenType.TYPE_ARG_BEGIN) {
            typeArgs = parseTypeArgs();
        }
        ClassTypeSignature signature = new ClassTypeSignature(outerClassName, typeArgs, null);
        if (lexer.peekToken(0).tokenType == TokenType.INNER_CLASS_PREFIX) {
            signature = parseInnerClasses(signature);
        }
        return signature;
    }

    /**
     * Parses the (possibly dot-separated) outer class name. A short name is
     * resolved to a fully qualified one via the imported classes providers.
     */
    private String parseOuterClassName() {
        StringBuilder name = new StringBuilder(lexer.nextToken(TokenType.JAVA_ID).text);
        boolean qualified = false;
        while (lexer.peekToken(0).tokenType == TokenType.PACKAGE_SEPARATOR) {
            qualified = true;
            lexer.nextToken(); // consume the separator
            name.append('.').append(lexer.nextToken(TokenType.JAVA_ID).text);
        }
        return qualified ? name.toString() : getFullyQualifiedName(name.toString());
    }

    /**
     * Resolves a short class name through the providers, first match wins;
     * falls back to the short name itself.
     */
    public String getFullyQualifiedName(String shortName) {
        for (IImportedClassesProvider provider : importedClassesProviders) {
            if (provider.isImported(shortName)) {
                return provider.getFullyQualifiedName(shortName);
            }
        }
        return shortName;
    }

    /**
     * Parses {@code <arg[,arg]*>}.
     */
    private TypeArgSignature[] parseTypeArgs() {
        lexer.nextToken(TokenType.TYPE_ARG_BEGIN);
        List<TypeArgSignature> args = new ArrayList<TypeArgSignature>();
        while (true) {
            args.add(parseTypeArg());
            if (lexer.peekToken(0).tokenType != TokenType.TYPE_ARG_SEPARATOR) {
                break;
            }
            lexer.nextToken();
        }
        lexer.nextToken(TokenType.TYPE_ARG_END);
        return args.toArray(new TypeArgSignature[args.size()]);
    }

    /**
     * Parses one type argument: a plain type, or a wildcard
     * ({@code ?}, {@code ? extends T}, {@code ? super T}).
     */
    private TypeArgSignature parseTypeArg() {
        if (lexer.peekToken(0).tokenType != TokenType.TYPE_ARG_QUESTION_MARK) {
            return new TypeArgSignature(TypeArgSignature.NO_WILDCARD,
                    parseFieldTypeSignature());
        }
        lexer.nextToken(); // consume '?'
        Token bound = lexer.peekToken(0);
        if (bound.tokenType == TokenType.JAVA_ID && bound.text.equals("extends")) {
            lexer.nextToken();
            return new TypeArgSignature(TypeArgSignature.UPPERBOUND_WILDCARD,
                    parseFieldTypeSignature());
        }
        if (bound.tokenType == TokenType.JAVA_ID && bound.text.equals("super")) {
            lexer.nextToken();
            return new TypeArgSignature(TypeArgSignature.LOWERBOUND_WILDCARD,
                    parseFieldTypeSignature());
        }
        return new TypeArgSignature(TypeArgSignature.UNBOUNDED_WILDCARD, null);
    }

    /**
     * Parses one {@code $Inner} (optionally generic) segment owned by the
     * given class signature.
     */
    private ClassTypeSignature parseInnerClass(ClassTypeSignature ownerClassTypeSignature) {
        lexer.nextToken(TokenType.INNER_CLASS_PREFIX);
        String id = lexer.nextToken(TokenType.JAVA_ID).text;
        TypeArgSignature[] typeArgs = new TypeArgSignature[0];
        if (acceptGenerics && lexer.peekToken(0).tokenType == TokenType.TYPE_ARG_BEGIN) {
            typeArgs = parseTypeArgs();
        }
        return new ClassTypeSignature(id, typeArgs, ownerClassTypeSignature);
    }

    /**
     * Parses a chain of inner-class segments, each nested in the previous one.
     */
    private ClassTypeSignature parseInnerClasses(ClassTypeSignature ownerClassTypeSignature) {
        ClassTypeSignature signature = parseInnerClass(ownerClassTypeSignature);
        while (lexer.peekToken(0).tokenType == TokenType.INNER_CLASS_PREFIX) {
            signature = parseInnerClass(signature);
        }
        return signature;
    }
}
| |
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* ShowcaseAd.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.adwords.axis.v201809.cm;
/**
* Represents a Showcase shopping ad.
* <span class="constraint AdxEnabled">This is enabled for
* AdX.</span>
*/
// NOTE(review): auto-generated by Apache Axis WSDL2Java (see file header).
// Statement order in equals/hashCode and the static metadata block is part of
// the generated serialization contract — do not hand-edit; regenerate instead.
public class ShowcaseAd extends com.google.api.ads.adwords.axis.v201809.cm.Ad implements java.io.Serializable {

    /* The name label for this ad.
     * <span class="constraint Required">This field is
     * required and should not be {@code null} when it is contained within
     * {@link Operator}s : ADD.</span> */
    private java.lang.String name;

    /* Headline displayed in the Showcase shopping ad. */
    private java.lang.String headline;

    /* Description displayed in the expanded view of the Showcase
     * shopping ad. */
    private java.lang.String description;

    /* Image displayed in the collapsed view of the Showcase shopping
     * ad.
     * <p>The format of the image must be either JPEG
     * or PNG and the size of the image must be
     * 270x270 px. */
    private com.google.api.ads.adwords.axis.v201809.cm.Image collapsedImage;

    /* Image displayed in the expanded view of the Showcase shopping
     * ad.
     * <p>The format of the image must be either JPEG
     * or PNG and the size of the image must be
     * 1080x566 px.
     * <span class="constraint Required">This field is
     * required and should not be {@code null} when it is contained within
     * {@link Operator}s : ADD.</span> */
    private com.google.api.ads.adwords.axis.v201809.cm.Image expandedImage;

    /** No-arg constructor required by the Axis bean (de)serializer. */
    public ShowcaseAd() {
    }

    /** Full constructor; the leading parameters populate the {@code Ad} superclass. */
    public ShowcaseAd(
            java.lang.Long id,
            java.lang.String url,
            java.lang.String displayUrl,
            java.lang.String[] finalUrls,
            java.lang.String[] finalMobileUrls,
            com.google.api.ads.adwords.axis.v201809.cm.AppUrl[] finalAppUrls,
            java.lang.String trackingUrlTemplate,
            java.lang.String finalUrlSuffix,
            com.google.api.ads.adwords.axis.v201809.cm.CustomParameters urlCustomParameters,
            com.google.api.ads.adwords.axis.v201809.cm.UrlData[] urlData,
            java.lang.Boolean automated,
            com.google.api.ads.adwords.axis.v201809.cm.AdType type,
            java.lang.Long devicePreference,
            com.google.api.ads.adwords.axis.v201809.cm.SystemManagedEntitySource systemManagedEntitySource,
            java.lang.String adType,
            java.lang.String name,
            java.lang.String headline,
            java.lang.String description,
            com.google.api.ads.adwords.axis.v201809.cm.Image collapsedImage,
            com.google.api.ads.adwords.axis.v201809.cm.Image expandedImage) {
        super(
            id,
            url,
            displayUrl,
            finalUrls,
            finalMobileUrls,
            finalAppUrls,
            trackingUrlTemplate,
            finalUrlSuffix,
            urlCustomParameters,
            urlData,
            automated,
            type,
            devicePreference,
            systemManagedEntitySource,
            adType);
        this.name = name;
        this.headline = headline;
        this.description = description;
        this.collapsedImage = collapsedImage;
        this.expandedImage = expandedImage;
    }

    @Override
    public String toString() {
        // Null-valued properties are omitted from the string representation.
        return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
            .omitNullValues()
            .add("adType", getAdType())
            .add("automated", getAutomated())
            .add("collapsedImage", getCollapsedImage())
            .add("description", getDescription())
            .add("devicePreference", getDevicePreference())
            .add("displayUrl", getDisplayUrl())
            .add("expandedImage", getExpandedImage())
            .add("finalAppUrls", getFinalAppUrls())
            .add("finalMobileUrls", getFinalMobileUrls())
            .add("finalUrlSuffix", getFinalUrlSuffix())
            .add("finalUrls", getFinalUrls())
            .add("headline", getHeadline())
            .add("id", getId())
            .add("name", getName())
            .add("systemManagedEntitySource", getSystemManagedEntitySource())
            .add("trackingUrlTemplate", getTrackingUrlTemplate())
            .add("type", getType())
            .add("url", getUrl())
            .add("urlCustomParameters", getUrlCustomParameters())
            .add("urlData", getUrlData())
            .toString();
    }

    /**
     * Gets the name value for this ShowcaseAd.
     *
     * @return name * The name label for this ad.
     * <span class="constraint Required">This field is
     * required and should not be {@code null} when it is contained within
     * {@link Operator}s : ADD.</span>
     */
    public java.lang.String getName() {
        return name;
    }

    /**
     * Sets the name value for this ShowcaseAd.
     *
     * @param name * The name label for this ad.
     * <span class="constraint Required">This field is
     * required and should not be {@code null} when it is contained within
     * {@link Operator}s : ADD.</span>
     */
    public void setName(java.lang.String name) {
        this.name = name;
    }

    /**
     * Gets the headline value for this ShowcaseAd.
     *
     * @return headline * Headline displayed in the Showcase shopping ad.
     */
    public java.lang.String getHeadline() {
        return headline;
    }

    /**
     * Sets the headline value for this ShowcaseAd.
     *
     * @param headline * Headline displayed in the Showcase shopping ad.
     */
    public void setHeadline(java.lang.String headline) {
        this.headline = headline;
    }

    /**
     * Gets the description value for this ShowcaseAd.
     *
     * @return description * Description displayed in the expanded view of the Showcase
     * shopping ad.
     */
    public java.lang.String getDescription() {
        return description;
    }

    /**
     * Sets the description value for this ShowcaseAd.
     *
     * @param description * Description displayed in the expanded view of the Showcase
     * shopping ad.
     */
    public void setDescription(java.lang.String description) {
        this.description = description;
    }

    /**
     * Gets the collapsedImage value for this ShowcaseAd.
     *
     * @return collapsedImage * Image displayed in the collapsed view of the Showcase shopping
     * ad.
     * <p>The format of the image must be either JPEG
     * or PNG and the size of the image must be
     * 270x270 px.
     */
    public com.google.api.ads.adwords.axis.v201809.cm.Image getCollapsedImage() {
        return collapsedImage;
    }

    /**
     * Sets the collapsedImage value for this ShowcaseAd.
     *
     * @param collapsedImage * Image displayed in the collapsed view of the Showcase shopping
     * ad.
     * <p>The format of the image must be either JPEG
     * or PNG and the size of the image must be
     * 270x270 px.
     */
    public void setCollapsedImage(com.google.api.ads.adwords.axis.v201809.cm.Image collapsedImage) {
        this.collapsedImage = collapsedImage;
    }

    /**
     * Gets the expandedImage value for this ShowcaseAd.
     *
     * @return expandedImage * Image displayed in the expanded view of the Showcase shopping
     * ad.
     * <p>The format of the image must be either JPEG
     * or PNG and the size of the image must be
     * 1080x566 px.
     * <span class="constraint Required">This field is
     * required and should not be {@code null} when it is contained within
     * {@link Operator}s : ADD.</span>
     */
    public com.google.api.ads.adwords.axis.v201809.cm.Image getExpandedImage() {
        return expandedImage;
    }

    /**
     * Sets the expandedImage value for this ShowcaseAd.
     *
     * @param expandedImage * Image displayed in the expanded view of the Showcase shopping
     * ad.
     * <p>The format of the image must be either JPEG
     * or PNG and the size of the image must be
     * 1080x566 px.
     * <span class="constraint Required">This field is
     * required and should not be {@code null} when it is contained within
     * {@link Operator}s : ADD.</span>
     */
    public void setExpandedImage(com.google.api.ads.adwords.axis.v201809.cm.Image expandedImage) {
        this.expandedImage = expandedImage;
    }

    // Re-entrancy guard used by equals() to break cycles in the (possibly
    // circular) object graph; non-null only while an equals() call is running.
    private java.lang.Object __equalsCalc = null;

    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof ShowcaseAd)) return false;
        ShowcaseAd other = (ShowcaseAd) obj;
        // NOTE(review): unreachable — the instanceof check above already
        // returns false for null; kept exactly as emitted by WSDL2Java.
        if (obj == null) return false;
        if (this == obj) return true;
        // If we are already comparing against some object (re-entrant call),
        // only succeed when it is the very same object.
        if (__equalsCalc != null) {
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = super.equals(obj) &&
            ((this.name==null && other.getName()==null) ||
             (this.name!=null &&
              this.name.equals(other.getName()))) &&
            ((this.headline==null && other.getHeadline()==null) ||
             (this.headline!=null &&
              this.headline.equals(other.getHeadline()))) &&
            ((this.description==null && other.getDescription()==null) ||
             (this.description!=null &&
              this.description.equals(other.getDescription()))) &&
            ((this.collapsedImage==null && other.getCollapsedImage()==null) ||
             (this.collapsedImage!=null &&
              this.collapsedImage.equals(other.getCollapsedImage()))) &&
            ((this.expandedImage==null && other.getExpandedImage()==null) ||
             (this.expandedImage!=null &&
              this.expandedImage.equals(other.getExpandedImage())));
        __equalsCalc = null;
        return _equals;
    }

    // Re-entrancy guard used by hashCode() to break cycles; a recursive call
    // contributes 0 instead of recursing forever.
    private boolean __hashCodeCalc = false;

    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = super.hashCode();
        if (getName() != null) {
            _hashCode += getName().hashCode();
        }
        if (getHeadline() != null) {
            _hashCode += getHeadline().hashCode();
        }
        if (getDescription() != null) {
            _hashCode += getDescription().hashCode();
        }
        if (getCollapsedImage() != null) {
            _hashCode += getCollapsedImage().hashCode();
        }
        if (getExpandedImage() != null) {
            _hashCode += getExpandedImage().hashCode();
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(ShowcaseAd.class, true);

    // Registers the XML element <-> bean field mapping used by the Axis
    // serializer/deserializer for each of this class's own fields.
    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "ShowcaseAd"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("name");
        elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "name"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "string"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("headline");
        elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "headline"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "string"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("description");
        elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "description"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "string"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("collapsedImage");
        elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "collapsedImage"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "Image"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("expandedImage");
        elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "expandedImage"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "Image"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }
}
| |
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.unitime.timetable.model;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.cpsolver.ifs.util.ToolBox;
import org.unitime.timetable.model.base.BaseTeachingRequest;
// Hibernate-backed model class for an instructor teaching request; extends the
// generated base class and adds label/master-class/combination logic.
public class TeachingRequest extends BaseTeachingRequest implements Comparable<TeachingRequest> {
    private static final long serialVersionUID = 1L;

    public TeachingRequest() {
        super();
    }

    /**
     * HTML label: course name followed by the (sorted) labels of all
     * requested classes, comma-separated.
     */
    @Override
    public String htmlLabel() {
        Set<TeachingClassRequest> requests = new TreeSet<TeachingClassRequest>(getClassRequests());
        String classes = null;
        for (TeachingClassRequest r: requests) {
            if (classes == null)
                classes = r.getTeachingClass().htmlLabel();
            else
                classes += ", " + r.getTeachingClass().htmlLabel();
        }
        return getOffering().getCourseName() + (classes == null ? "" : " " + classes);
    }

    @Override
    public Session getSession() {
        return getOffering().getSession();
    }

    @Override
    public Department getDepartment() {
        return getOffering().getDepartment();
    }

    /**
     * Groups this request's class requests by scheduling subpart.
     * NOTE(review): method name has a typo ("Subpar" vs "Subpart") but is part
     * of the public API — renaming would break callers.
     *
     * @return map of subpart to the class requests belonging to it
     */
    public Map<SchedulingSubpart, List<TeachingClassRequest>> getSubparMap() {
        Map<SchedulingSubpart, List<TeachingClassRequest>> map = new HashMap<SchedulingSubpart, List<TeachingClassRequest>>();
        for (TeachingClassRequest r: getClassRequests()) {
            List<TeachingClassRequest> requests = map.get(r.getTeachingClass().getSchedulingSubpart());
            if (requests == null) {
                requests = new ArrayList<TeachingClassRequest>();
                map.put(r.getTeachingClass().getSchedulingSubpart(), requests);
            }
            requests.add(r);
        }
        return map;
    }

    /**
     * Picks the "master" class request: among subparts with a single class
     * request, prefers a child of the current master, otherwise an unrelated
     * subpart with more classes.
     *
     * @param checkClassRequests when true, any subpart with multiple class
     *        requests must have them all share the same attributes
     *        (assign/common/share/lead/overlap) — otherwise returns null
     * @return the master class request, or null if none qualifies
     */
    public TeachingClassRequest getMasterRequest(boolean checkClassRequests) {
        Map<SchedulingSubpart, List<TeachingClassRequest>> map = getSubparMap();
        TeachingClassRequest master = null;
        for (Map.Entry<SchedulingSubpart, List<TeachingClassRequest>> e: map.entrySet()) {
            if (e.getValue().size() > 1) {
                if (checkClassRequests) {
                    // All class requests of the same subpart must be homogeneous.
                    TeachingClassRequest r1 = e.getValue().get(0);
                    for (int i = 1; i < e.getValue().size(); i++) {
                        TeachingClassRequest r2 = e.getValue().get(i);
                        if (!ToolBox.equals(r1.isAssignInstructor(), r2.isAssignInstructor())) return null;
                        if (!ToolBox.equals(r1.isCommon(), r2.isCommon())) return null;
                        if (!ToolBox.equals(r1.getPercentShare(), r2.getPercentShare())) return null;
                        if (!ToolBox.equals(r1.isLead(), r2.isLead())) return null;
                        if (!ToolBox.equals(r1.isCanOverlap(), r2.isCanOverlap())) return null;
                    }
                }
                continue;
            }
            TeachingClassRequest adept = e.getValue().get(0);
            if (master == null) {
                master = adept;
            } else if (master.isParentOf(adept)) {
                // Prefer the more specific (child) class request.
                master = adept;
            } else if (!adept.isParentOf(master) && adept.getTeachingClass().getSchedulingSubpart().getClasses().size() > master.getTeachingClass().getSchedulingSubpart().getClasses().size()) {
                // Unrelated subparts: prefer the one with more classes.
                master = adept;
            }
        }
        return master;
    }

    /**
     * Computes, for each given subpart, the classes related to the master
     * class: the master itself, its parents/children in related subparts, or
     * all classes of a completely unrelated subpart.
     */
    public static List<Class_> getClasses(Class_ master, Set<SchedulingSubpart> subparts) {
        List<Class_> classes = new ArrayList<Class_>();
        for (SchedulingSubpart subpart: subparts) {
            if (subpart.equals(master.getSchedulingSubpart())) {
                classes.add(master);
            } else if (subpart.isParentOf(master.getSchedulingSubpart())) {
                for (Class_ c: subpart.getClasses())
                    if (c.isParentOf(master)) classes.add(c);
            } else if (master.getSchedulingSubpart().isParentOf(subpart)) {
                for (Class_ c: subpart.getClasses())
                    if (master.isParentOf(c)) classes.add(c);
            } else {
                // No direct parent/child relation: walk up from the master
                // until a parent whose subpart is an ancestor of this subpart.
                Class_ parent = master.getParentClass();
                while (parent != null && !parent.getSchedulingSubpart().isParentOf(subpart))
                    parent = parent.getParentClass();
                if (parent != null) {
                    for (Class_ c: subpart.getClasses())
                        if (parent.isParentOf(c)) classes.add(c);
                } else {
                    classes.addAll(subpart.getClasses());
                }
            }
        }
        return classes;
    }

    /**
     * True when this request's classes are exactly the "standard" set derived
     * from the given master request (see {@link #getClasses}).
     */
    public boolean isStandard(TeachingClassRequest master) {
        if (master == null) return false;
        Set<SchedulingSubpart> subparts = new HashSet<SchedulingSubpart>();
        Set<Class_> classes = new HashSet<Class_>();
        for (TeachingClassRequest r: getClassRequests()) {
            classes.add(r.getTeachingClass());
            subparts.add(r.getTeachingClass().getSchedulingSubpart());
        }
        List<Class_> std = getClasses(master.getTeachingClass(), subparts);
        if (std.size() != classes.size()) return false;
        for (Class_ c: std) if (!classes.contains(c)) return false;
        return true;
    }

    /**
     * True when this request can be merged with the other: both are standard,
     * share the master subpart, and agree on all attributes and preferences.
     */
    public boolean canCombine(TeachingRequest other) {
        TeachingClassRequest m1 = getMasterRequest(true);
        if (m1 == null || !isStandard(m1)) return false;
        TeachingClassRequest m2 = other.getMasterRequest(true);
        if (m2 == null || m1.getTeachingClass().equals(m2.getTeachingClass()) || !other.isStandard(m2) || !m1.getTeachingClass().getSchedulingSubpart().equals(m2.getTeachingClass().getSchedulingSubpart())) return false;
        // different properties
        if (!ToolBox.equals(getTeachingLoad(), other.getTeachingLoad())) return false;
        if (!ToolBox.equals(getResponsibility(), other.getResponsibility())) return false;
        if (!ToolBox.equals(getSameCoursePreference(), other.getSameCoursePreference())) return false;
        if (!ToolBox.equals(getSameCommonPart(), other.getSameCommonPart())) return false;
        if (!ToolBox.equals(isAssignCoordinator(), other.isAssignCoordinator())) return false;
        // different preferences
        Set<Preference> p1 = getPreferences();
        Set<Preference> p2 = other.getPreferences();
        if (p1.size() != p2.size()) return false;
        // Every preference of p1 must have a matching preference in p2
        // (same preference with the same level); labeled continue skips to the
        // next p1 element once a match is found.
        p1: for (Preference p: p1) {
            for (Preference q: p2) {
                if (p.isSame(q) && p.getPrefLevel().equals(q.getPrefLevel())) continue p1;
            }
            return false;
        }
        return true;
    }

    /**
     * Orders by controlling course offering, then by the sorted class
     * requests, then by unique id (null ids sort first via the -1 sentinel).
     */
    @Override
    public int compareTo(TeachingRequest r) {
        int cmp = getOffering().getControllingCourseOffering().compareTo(r.getOffering().getControllingCourseOffering());
        if (cmp != 0) return cmp;
        Iterator<TeachingClassRequest> i1 = new TreeSet<TeachingClassRequest>(getClassRequests()).iterator();
        Iterator<TeachingClassRequest> i2 = new TreeSet<TeachingClassRequest>(r.getClassRequests()).iterator();
        while (i1.hasNext() && i2.hasNext()) {
            cmp = i1.next().compareTo(i2.next());
            if (cmp != 0) return cmp;
        }
        // Fewer class requests sorts first.
        if (i2.hasNext()) return -1;
        if (i1.hasNext()) return 1;
        return (getUniqueId() == null ? Long.valueOf(-1) : getUniqueId()).compareTo(r.getUniqueId() == null ? -1 : r.getUniqueId());
    }

    /**
     * True when the request has no coordinator assignment and every
     * instructor-assigned class is cancelled.
     */
    public boolean isCancelled() {
        if (isAssignCoordinator()) return false;
        for (TeachingClassRequest tcr: getClassRequests()) {
            if (tcr.isAssignInstructor() && !tcr.getTeachingClass().isCancelled()) return false;
        }
        return true;
    }

    /**
     * True when at least one assigned instructor of this request is already a
     * committed class instructor on one of its classes.
     */
    public boolean isCommitted() {
        for (TeachingClassRequest tcr: getClassRequests())
            if (tcr.isAssignInstructor())
                for (ClassInstructor ci: tcr.getTeachingClass().getClassInstructors())
                    if (this.equals(ci.getTeachingRequest()) && getAssignedInstructors().contains(ci.getInstructor())) return true;
        return false;
    }

    @Override
    public String toString() {
        return getOffering().getCourseName() + " " + getClassRequests();
    }
}
| |
package blue.stack.serializableParcelablegenerator;
import blue.stack.serializableParcelablegenerator.typeserializers.*;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import java.util.List;
/**
 * IntelliJ plugin helper that injects a serializable-parcelable implementation
 * into a PSI class: a writeToParcel method, a getParacelableBytes method, a
 * createFromParcel constructor-style method, and (if needed) a default
 * constructor. Field (de)serialization code is produced by the configured
 * chain of {@link TypeSerializer}s.
 *
 * NOTE(review): the generated method name "getParacelableBytes" (sic) must
 * stay spelled exactly like this — it matches the IParcelable interface lookup
 * in removeExistingParcelableImplementation().
 */
public class CodeGenerator {

    private final PsiClass mClass;
    private final List<PsiField> mFields;
    private final TypeSerializerFactory mTypeSerializerFactory;

    /**
     * @param psiClass class to modify
     * @param fields   fields to (de)serialize, in declaration order
     */
    public CodeGenerator(PsiClass psiClass, List<PsiField> fields) {
        mClass = psiClass;
        mFields = fields;
        // First factory in the chain that supports a field's type wins.
        this.mTypeSerializerFactory = new ChainSerializerFactory(
                new BundleSerializerFactory(),
                new DateSerializerFactory(),
                new EnumerationSerializerFactory(),
                new ParcelableListSerializerFactory(),
                new PrimitiveTypeSerializerFactory(),
                new PrimitiveArraySerializerFactory(),
                new ListSerializerFactory(),
                new ParcelableSerializerFactory(),
                new SerializableSerializerFactory()
        );
    }

    /**
     * Builds the source of the createFromParcel(IParcel) method: one read
     * statement per field, then "return this".
     */
    private String generateConstructor(List<PsiField> fields, PsiClass psiClass) {
        String className = psiClass.getName();
        StringBuilder sb = new StringBuilder("@Override public blue.stack.serializableParcelable.IParcelable createFromParcel(blue.stack.serializableParcelable.IParcel in) {");
        // Creates all of the deserialization methods for the given fields
        for (PsiField field : fields) {
            sb.append(getSerializerForType(field).readValue(field, "in"));
        }
        sb.append("\n" +
                "return this;");
        sb.append("}");
        return sb.toString();
    }

    /**
     * Builds the source of the writeToParcel(IParcel, int) method: writes the
     * construct id first, then one write statement per field.
     */
    private String generateWriteToParcel(List<PsiField> fields) {
        StringBuilder sb = new StringBuilder("@Override public void writeToParcel(blue.stack.serializableParcelable.IParcel dest, int constructID) {");
        sb.append("dest.writeInt(constructID);\n");
        for (PsiField field : fields) {
            sb.append(getSerializerForType(field).writeValue(field, "dest", "constructID"));
        }
        sb.append("}");
        return sb.toString();
    }

    /**
     * Builds the source of getParacelableBytes(): serializes the object into a
     * ParcelObject via writeToParcel and returns its byte array.
     */
    private String generategetParacelableBytes() {
        StringBuilder sb = new StringBuilder("@Override public byte[] getParacelableBytes() {");
        sb.append("\n blue.stack.serializableParcelable.ParcelObject parcelObject = new blue.stack.serializableParcelable.ParcelObject(); \n");
        sb.append("writeToParcel(parcelObject,constructID); \n");
        sb.append("return parcelObject.toByteArray(); \n");
        sb.append("}");
        return sb.toString();
    }

    /** Resolves the serializer responsible for the field's type. */
    private TypeSerializer getSerializerForType(PsiField field) {
        return mTypeSerializerFactory.getSerializer(field.getType());
    }

    /**
     * Entry point: removes any previously generated implementation, then
     * inserts the generated methods before the class's closing brace and
     * shortens fully-qualified references per code style.
     */
    public void generate() {
        PsiElementFactory elementFactory = JavaPsiFacade.getElementFactory(mClass.getProject());
        removeExistingParcelableImplementation(mClass);
        // Method for writing to the parcel
        PsiMethod writeToParcelMethod = elementFactory.createMethodFromText(generateWriteToParcel(mFields), mClass);
        // Default constructor if needed
        String defaultConstructorString = generateDefaultConstructor(mClass);
        PsiMethod defaultConstructor = null;
        PsiMethod generategetParacelableBytesMethod = elementFactory.createMethodFromText(generategetParacelableBytes(), mClass);
        if (defaultConstructorString != null) {
            defaultConstructor = elementFactory.createMethodFromText(defaultConstructorString, mClass);
        }
        // Deserialization "constructor"
        PsiMethod constructor = elementFactory.createMethodFromText(generateConstructor(mFields, mClass), mClass);
        JavaCodeStyleManager styleManager = JavaCodeStyleManager.getInstance(mClass.getProject());
        // Shorten all class references in each inserted member
        styleManager.shortenClassReferences(mClass.addBefore(writeToParcelMethod, mClass.getLastChild()));
        styleManager.shortenClassReferences(mClass.addBefore(generategetParacelableBytesMethod, mClass.getLastChild()));
        // Only adds if available
        if (defaultConstructor != null) {
            styleManager.shortenClassReferences(mClass.addBefore(defaultConstructor, mClass.getLastChild()));
        }
        styleManager.shortenClassReferences(mClass.addBefore(constructor, mClass.getLastChild()));
        makeClassImplementParcelable(elementFactory);
    }

    /**
     * Removes any previously generated parcelable implementation methods so
     * they can be regenerated without duplication.
     *
     * @param psiClass class to clean up
     */
    private void removeExistingParcelableImplementation(PsiClass psiClass) {
        findAndRemoveMethod(psiClass, "createFromParcel", "blue.stack.serializableParcelable.IParcel");
        findAndRemoveMethod(psiClass, "getParacelableBytes");
        findAndRemoveMethod(psiClass, "writeToParcel", "blue.stack.serializableParcelable.IParcel", "int");
    }

    /**
     * Returns the source of a no-arg constructor when the class declares no
     * constructors at all, otherwise null.
     */
    private String generateDefaultConstructor(PsiClass clazz) {
        if (clazz.getConstructors().length == 0) {
            // No constructors exist, make a default one for convenience
            return "public " + clazz.getName() + "(){}\n";
        }
        return null;
    }

    /**
     * Adds "implements blue.stack.serializableParcelable.IParcelable" to the
     * class unless it is already present.
     */
    private void makeClassImplementParcelable(PsiElementFactory elementFactory) {
        final PsiClassType[] implementsListTypes = mClass.getImplementsListTypes();
        final String implementsType = "blue.stack.serializableParcelable.IParcelable";
        for (PsiClassType implementsListType : implementsListTypes) {
            PsiClass resolved = implementsListType.resolve();
            // Already implements Parcelable, no need to add it
            if (resolved != null && implementsType.equals(resolved.getQualifiedName())) {
                return;
            }
        }
        PsiJavaCodeReferenceElement implementsReference = elementFactory.createReferenceFromText(implementsType, mClass);
        PsiReferenceList implementsList = mClass.getImplementsList();
        if (implementsList != null) {
            implementsList.add(implementsReference);
        }
    }

    /**
     * Deletes every method of the given name whose parameter types exactly
     * match {@code arguments} (canonical text comparison).
     */
    private static void findAndRemoveMethod(PsiClass clazz, String methodName, String... arguments) {
        // Maybe there's an easier way to do this with mClass.findMethodBySignature(), but I'm not an expert on Psi*
        PsiMethod[] methods = clazz.findMethodsByName(methodName, false);
        for (PsiMethod method : methods) {
            PsiParameterList parameterList = method.getParameterList();
            if (parameterList.getParametersCount() == arguments.length) {
                boolean shouldDelete = true;
                PsiParameter[] parameters = parameterList.getParameters();
                for (int i = 0; i < arguments.length; i++) {
                    if (!parameters[i].getType().getCanonicalText().equals(arguments[i])) {
                        shouldDelete = false;
                    }
                }
                if (shouldDelete) {
                    method.delete();
                }
            }
        }
    }
}
| |
/*
* Copyright 2019 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.client.endpoint.healthcheck;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.stream.IntStream;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Streams;
import com.linecorp.armeria.client.Endpoint;
/**
 * Tests for {@code PartialHealthCheckStrategy} in its two configurations:
 * max-ratio mode (select at most a ratio of all candidates) and max-count
 * mode (select at most a fixed number of candidates).
 */
class PartialHealthCheckStrategyTest {
    // Health values passed to updateHealth(): 1 means healthy, 0 means unhealthy.
    private static final double HEALTHY = 1;
    private static final double UNHEALTHY = 0;
    // 90% of the 10 max-ratio candidates -> 9 selected endpoints.
    private static final double MAX_RATIO = 0.9;
    // At most 5 of the 6 max-count candidates are selected.
    private static final int MAX_COUNT = 5;
    /**
     * Creates {@code size} dummy endpoints with randomized host names.
     */
    private static List<Endpoint> createCandidates(int size) {
        final Random random = new Random();
        return IntStream.range(0, size)
                        .mapToObj(i -> Endpoint.of("dummy" + random.nextInt()))
                        .collect(toImmutableList());
    }
    /**
     * Asserts that {@code actualCandidates} has the same size as, and contains
     * every element of, {@code expectedCandidates} (order-insensitive).
     */
    private static void assertCandidates(List<Endpoint> actualCandidates, List<Endpoint> expectedCandidates) {
        assertThat(actualCandidates).hasSize(expectedCandidates.size());
        for (Endpoint expectedCandidate : expectedCandidates) {
            assertThat(actualCandidates).contains(expectedCandidate);
        }
    }
    /**
     * Asserts that {@code candidates} contains no duplicate endpoints.
     */
    private static void assertUniqueCandidates(List<Endpoint> candidates) {
        if (candidates.isEmpty()) {
            return;
        }
        // Converting to a set drops duplicates; equal sizes imply uniqueness.
        assertThat(candidates).hasSameSizeAs(ImmutableSet.copyOf(candidates));
    }
    private PartialHealthCheckStrategy maxRatioStrategy;
    private PartialHealthCheckStrategy maxCountStrategy;
    private List<Endpoint> candidatesForMaxRatio;
    private List<Endpoint> candidatesForMaxCount;
    // Rebuild fresh strategies and candidate pools before every test so state
    // mutations from one test cannot leak into another.
    @BeforeEach
    void beforeEach() {
        maxRatioStrategy = new PartialHealthCheckStrategyBuilder().maxEndpointRatio(MAX_RATIO).build();
        maxCountStrategy = new PartialHealthCheckStrategyBuilder().maxEndpointCount(MAX_COUNT).build();
        candidatesForMaxRatio = createCandidates(10);
        candidatesForMaxCount = createCandidates(6);
        maxRatioStrategy.updateCandidates(candidatesForMaxRatio);
        maxCountStrategy.updateCandidates(candidatesForMaxCount);
    }
    // Before the first updateCandidates() call, nothing can be selected.
    @Test
    void getCandidatesWhenBeforeFirstUpdateCandidates() {
        maxRatioStrategy = new PartialHealthCheckStrategyBuilder().maxEndpointRatio(MAX_RATIO)
                                                                  .build();
        assertThat(maxRatioStrategy.getSelectedEndpoints()).isEmpty();
    }
    // Supplying an empty candidate list clears the selection.
    @Test
    void getCandidatesAfterSettingEmptyCandidates() {
        maxRatioStrategy.updateCandidates(new ArrayList<>());
        assertThat(maxRatioStrategy.getSelectedEndpoints()).isEmpty();
    }
    // 10 candidates at MAX_RATIO 0.9 -> 9 unique endpoints, all from the pool.
    @Test
    void getCandidates() {
        maxRatioStrategy.updateCandidates(candidatesForMaxRatio);
        final List<Endpoint> selectedCandidates = maxRatioStrategy.getSelectedEndpoints();
        assertThat(selectedCandidates).hasSize(9);
        selectedCandidates.forEach(
                selectedCandidate -> assertThat(candidatesForMaxRatio).contains(selectedCandidate));
        assertUniqueCandidates(selectedCandidates);
    }
    // Reporting a healthy endpoint does not change the selection.
    @Test
    void updateHealthWhenEndpointIsHealthy() {
        final Endpoint endpoint = candidatesForMaxRatio.get(1);
        assertThat(maxRatioStrategy.updateHealth(endpoint, HEALTHY)).isFalse();
    }
    // An unhealthy endpoint is swapped out, keeping the selection size at 9.
    @Test
    void updateHealthWhenEndpointIsUnhealthyOnMaxRatioMode() {
        final Endpoint unhealthyEndpoint = maxRatioStrategy.getSelectedEndpoints().get(0);
        assertThat(maxRatioStrategy.updateHealth(unhealthyEndpoint, UNHEALTHY)).isTrue();
        final List<Endpoint> selectedCandidates = maxRatioStrategy.getSelectedEndpoints();
        assertThat(selectedCandidates).hasSize(9)
                                      .doesNotContain(unhealthyEndpoint);
        selectedCandidates.forEach(
                selectedCandidate -> assertThat(candidatesForMaxRatio).contains(selectedCandidate));
        assertUniqueCandidates(selectedCandidates);
    }
    // Same as above but for max-count mode: the selection stays at 5.
    @Test
    void updateHealthWhenEndpointIsUnhealthyOnMaxValueMode() {
        final Endpoint unhealthyEndpoint = maxCountStrategy.getSelectedEndpoints().get(0);
        assertThat(maxCountStrategy.updateHealth(unhealthyEndpoint, UNHEALTHY)).isTrue();
        final List<Endpoint> selectedCandidates = maxCountStrategy.getSelectedEndpoints();
        assertThat(selectedCandidates).hasSize(5)
                                      .doesNotContain(unhealthyEndpoint);
        selectedCandidates.forEach(
                selectedCandidate -> assertThat(candidatesForMaxCount).contains(selectedCandidate));
    }
    // With ratio 1 every candidate must stay selected even while unhealthy.
    @Test
    void updateHealthWhenEndpointIsUnhealthyButDoesNotHaveEnoughCandidatesOnMaxRatioMode() {
        final List<Endpoint> endpoints = createCandidates(5);
        maxRatioStrategy = new PartialHealthCheckStrategyBuilder().maxEndpointRatio(1).build();
        maxRatioStrategy.updateCandidates(endpoints);
        for (Endpoint unhealthyEndpoint : maxRatioStrategy.getSelectedEndpoints()) {
            final boolean updateRes = maxRatioStrategy.updateHealth(unhealthyEndpoint, UNHEALTHY);
            final List<Endpoint> selectedCandidates = maxRatioStrategy.getSelectedEndpoints();
            // When there are not enough candidates, some of the unhealthy candidates are chosen again.
            // At this time, even an unhealthy candidate delivered by the function may be randomly chosen again.
            if (selectedCandidates.contains(unhealthyEndpoint)) {
                assertThat(updateRes).isFalse();
            } else {
                assertThat(updateRes).isTrue();
            }
            assertThat(selectedCandidates).hasSize(5);
            selectedCandidates.forEach(
                    selectedCandidate -> assertThat(endpoints).contains(selectedCandidate));
        }
    }
    // Max-count analogue: 6 candidates with max count 5 leaves little slack.
    @Test
    void updateHealthWhenEndpointIsUnhealthyButDoesNotHaveEnoughCandidatesOnMaxValueMode() {
        for (Endpoint unhealthyEndpoint : maxCountStrategy.getSelectedEndpoints()) {
            final boolean updateRes = maxCountStrategy.updateHealth(unhealthyEndpoint, UNHEALTHY);
            final List<Endpoint> selectedCandidates = maxCountStrategy.getSelectedEndpoints();
            // When there are not enough candidates, some of the unhealthy candidates are chosen again.
            // At this time, even an unhealthy candidate delivered by the function may be randomly chosen again.
            if (selectedCandidates.contains(unhealthyEndpoint)) {
                assertThat(updateRes).isFalse();
            } else {
                assertThat(updateRes).isTrue();
            }
            assertThat(selectedCandidates).hasSize(5);
            selectedCandidates.forEach(
                    selectedCandidate -> assertThat(candidatesForMaxCount).contains(selectedCandidate));
        }
    }
    // Full unhealthy -> healthy -> displaced-by-another-unhealthy cycle
    // in max-ratio mode; the selection size stays constant throughout.
    @Test
    void updateHealthWhenMaxRatioMode() {
        List<Endpoint> selectedCandidates = maxRatioStrategy.getSelectedEndpoints();
        final Endpoint unhealthyCandidate = selectedCandidates.get(0);
        assertThat(selectedCandidates).hasSize(9);
        boolean updateRes = maxRatioStrategy.updateHealth(unhealthyCandidate, UNHEALTHY);
        selectedCandidates = maxRatioStrategy.getSelectedEndpoints();
        assertThat(updateRes).isTrue();
        assertThat(selectedCandidates).hasSize(9)
                                      .doesNotContain(unhealthyCandidate);
        updateRes = maxRatioStrategy.updateHealth(unhealthyCandidate, HEALTHY);
        selectedCandidates = maxRatioStrategy.getSelectedEndpoints();
        assertThat(updateRes).isFalse();
        assertThat(selectedCandidates).hasSize(9)
                                      .doesNotContain(unhealthyCandidate);
        updateRes = maxRatioStrategy.updateHealth(selectedCandidates.get(0), UNHEALTHY);
        selectedCandidates = maxRatioStrategy.getSelectedEndpoints();
        assertThat(updateRes).isTrue();
        assertThat(selectedCandidates).hasSize(9)
                                      .contains(unhealthyCandidate);
    }
    // Same cycle as updateHealthWhenMaxRatioMode, but in max-count mode.
    @Test
    void updateHealthWhenMaxValueMode() {
        List<Endpoint> selectedCandidates = maxCountStrategy.getSelectedEndpoints();
        final Endpoint unhealthyCandidate = selectedCandidates.get(0);
        assertThat(selectedCandidates).hasSize(5);
        boolean updateRes = maxCountStrategy.updateHealth(unhealthyCandidate, UNHEALTHY);
        selectedCandidates = maxCountStrategy.getSelectedEndpoints();
        assertThat(updateRes).isTrue();
        assertThat(selectedCandidates).hasSize(5)
                                      .doesNotContain(unhealthyCandidate);
        updateRes = maxCountStrategy.updateHealth(unhealthyCandidate, HEALTHY);
        selectedCandidates = maxCountStrategy.getSelectedEndpoints();
        assertThat(updateRes).isFalse();
        assertThat(selectedCandidates).hasSize(5)
                                      .doesNotContain(unhealthyCandidate);
        updateRes = maxCountStrategy.updateHealth(selectedCandidates.get(0), UNHEALTHY);
        selectedCandidates = maxCountStrategy.getSelectedEndpoints();
        assertThat(updateRes).isTrue();
        assertThat(selectedCandidates).hasSize(5)
                                      .contains(unhealthyCandidate);
    }
    // Reports for an endpoint that is not a candidate must never enter the
    // selection, whatever health value is reported.
    @Test
    void updateHealthByDisappearedCandidate() {
        final Endpoint disappearedCandidate = Endpoint.of("disappeared");
        final List<Endpoint> candidates = createCandidates(3);
        maxCountStrategy.updateCandidates(candidates);
        assertThat(maxCountStrategy.getSelectedEndpoints()).hasSize(3);
        boolean updateRes = maxCountStrategy.updateHealth(disappearedCandidate, HEALTHY);
        assertThat(updateRes).isTrue();
        List<Endpoint> selectedCandidates = maxCountStrategy.getSelectedEndpoints();
        assertThat(selectedCandidates).hasSize(3)
                                      .doesNotContain(disappearedCandidate);
        updateRes = maxCountStrategy.updateHealth(disappearedCandidate, UNHEALTHY);
        assertThat(updateRes).isTrue();
        selectedCandidates = maxCountStrategy.getSelectedEndpoints();
        assertThat(selectedCandidates).hasSize(3)
                                      .doesNotContain(disappearedCandidate);
    }
    // Replacing the candidate pool (entirely new, subset of old, and mixed)
    // must make the selection track the new pool each time.
    @Test
    void updateCandidates() {
        final List<Endpoint> newCandidates = createCandidates(5);
        maxCountStrategy.updateCandidates(newCandidates);
        assertCandidates(maxCountStrategy.getSelectedEndpoints(), newCandidates);
        final List<Endpoint> someOfOldCandidates = candidatesForMaxCount.subList(0, 3);
        maxCountStrategy.updateCandidates(someOfOldCandidates);
        assertCandidates(maxCountStrategy.getSelectedEndpoints(), someOfOldCandidates);
        final List<Endpoint> mixedCandidates = Streams.concat(createCandidates(2).stream(),
                                                              someOfOldCandidates.stream())
                                                      .collect(toImmutableList());
        maxCountStrategy.updateCandidates(mixedCandidates);
        assertCandidates(maxCountStrategy.getSelectedEndpoints(), mixedCandidates);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.agent.core.context.trace;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.skywalking.apm.agent.core.context.util.KeyValuePair;
import org.apache.skywalking.apm.agent.core.context.util.ThrowableTransformer;
import org.apache.skywalking.apm.agent.core.dictionary.DictionaryUtil;
import org.apache.skywalking.apm.network.proto.SpanObject;
import org.apache.skywalking.apm.network.proto.SpanType;
import org.apache.skywalking.apm.network.trace.component.Component;
/**
* The <code>AbstractTracingSpan</code> represents a group of {@link AbstractSpan} implementations, which belongs a real
* distributed trace.
*
* @author wusheng
*/
public abstract class AbstractTracingSpan implements AbstractSpan {
    // Identifier of this span inside its segment.
    protected int spanId;
    // Identifier of the parent span; semantics of the root value are defined
    // by the tracing context that creates the span.
    protected int parentSpanId;
    // Key/value tags attached via tag(); lazily created.
    protected List<KeyValuePair> tags;
    // Operation name; mutually exclusive with operationId (one is always the
    // DictionaryUtil null value / null).
    protected String operationName;
    // Dictionary-compressed operation id; DictionaryUtil.nullValue() when the
    // plain operationName is in use.
    protected int operationId;
    protected SpanLayer layer;
    /**
     * The start time of this Span.
     */
    protected long startTime;
    /**
     * The end time of this Span.
     */
    protected long endTime;
    /**
     * Error has occurred in the scope of span.
     */
    protected boolean errorOccurred = false;
    protected int componentId = 0;
    protected String componentName;
    /**
     * Log is a concept from OpenTracing spec. https://github.com/opentracing/specification/blob/master/specification.md#log-structured-data
     */
    protected List<LogDataEntity> logs;
    /**
     * The refs of parent trace segments, except the primary one. For most RPC call, {@link #refs} contains only one
     * element, but if this segment is a start span of batch process, the segment faces multi parents, at this moment,
     * we use this {@link #refs} to link them.
     */
    protected List<TraceSegmentRef> refs;
    // Constructor for spans identified by a plain operation name; the
    // operation id is set to the dictionary null sentinel.
    protected AbstractTracingSpan(int spanId, int parentSpanId, String operationName) {
        this.operationName = operationName;
        this.operationId = DictionaryUtil.nullValue();
        this.spanId = spanId;
        this.parentSpanId = parentSpanId;
    }
    // Constructor for spans identified by a dictionary-compressed operation id.
    protected AbstractTracingSpan(int spanId, int parentSpanId, int operationId) {
        this.operationName = null;
        this.operationId = operationId;
        this.spanId = spanId;
        this.parentSpanId = parentSpanId;
    }
    /**
     * Set a key:value tag on the Span.
     *
     * @return this Span instance, for chaining
     */
    @Override
    public AbstractTracingSpan tag(String key, String value) {
        if (tags == null) {
            tags = new LinkedList<KeyValuePair>();
        }
        tags.add(new KeyValuePair(key, value));
        return this;
    }
    /**
     * Finish the active Span. When it is finished, it will be archived by the given {@link TraceSegment}, which owners
     * it.
     *
     * @param owner of the Span.
     */
    public boolean finish(TraceSegment owner) {
        // Stamp the end time first, then hand the span over to its segment.
        this.endTime = System.currentTimeMillis();
        owner.archive(this);
        return true;
    }
    // Marks the span as started now (wall-clock time).
    @Override
    public AbstractTracingSpan start() {
        this.startTime = System.currentTimeMillis();
        return this;
    }
    /**
     * Record an exception event of the current walltime timestamp.
     *
     * @param t any subclass of {@link Throwable}, which occurs in this span.
     * @return the Span, for chaining
     */
    @Override
    public AbstractTracingSpan log(Throwable t) {
        if (logs == null) {
            logs = new LinkedList<LogDataEntity>();
        }
        // Stack traces are truncated to 4000 characters by the transformer.
        logs.add(new LogDataEntity.Builder()
            .add(new KeyValuePair("event", "error"))
            .add(new KeyValuePair("error.kind", t.getClass().getName()))
            .add(new KeyValuePair("message", t.getMessage()))
            .add(new KeyValuePair("stack", ThrowableTransformer.INSTANCE.convert2String(t, 4000)))
            .build(System.currentTimeMillis()));
        return this;
    }
    /**
     * Record a common log with multi fields, for supporting opentracing-java
     *
     * @param fields field name to value map; values are stringified via toString()
     * @return the Span, for chaining
     */
    @Override
    public AbstractTracingSpan log(long timestampMicroseconds, Map<String, ?> fields) {
        if (logs == null) {
            logs = new LinkedList<LogDataEntity>();
        }
        LogDataEntity.Builder builder = new LogDataEntity.Builder();
        for (Map.Entry<String, ?> entry : fields.entrySet()) {
            builder.add(new KeyValuePair(entry.getKey(), entry.getValue().toString()));
        }
        logs.add(builder.build(timestampMicroseconds));
        return this;
    }
    /**
     * In the scope of this span tracing context, error occurred, in auto-instrumentation mechanism, almost means throw
     * an exception.
     *
     * @return span instance, for chaining.
     */
    @Override
    public AbstractTracingSpan errorOccurred() {
        this.errorOccurred = true;
        return this;
    }
    /**
     * Set the operation name, just because these is not compress dictionary value for this name. Use the entire string
     * temporarily, the agent will compress this name in async mode.
     *
     * @param operationName plain operation name; resets the id to the null sentinel
     * @return span instance, for chaining.
     */
    @Override
    public AbstractTracingSpan setOperationName(String operationName) {
        this.operationName = operationName;
        this.operationId = DictionaryUtil.nullValue();
        return this;
    }
    /**
     * Set the operation id, which compress by the name.
     *
     * @param operationId dictionary-compressed id; clears the plain name
     * @return span instance, for chaining.
     */
    @Override
    public AbstractTracingSpan setOperationId(int operationId) {
        this.operationId = operationId;
        this.operationName = null;
        return this;
    }
    @Override
    public int getSpanId() {
        return spanId;
    }
    @Override
    public int getOperationId() {
        return operationId;
    }
    @Override
    public String getOperationName() {
        return operationName;
    }
    @Override
    public AbstractTracingSpan setLayer(SpanLayer layer) {
        this.layer = layer;
        return this;
    }
    /**
     * Set the component of this span, with internal supported. Highly recommend to use this way.
     *
     * @param component a known component; only its numeric id is stored
     * @return span instance, for chaining.
     */
    @Override
    public AbstractTracingSpan setComponent(Component component) {
        this.componentId = component.getId();
        return this;
    }
    /**
     * Set the component name. By using this, cost more memory and network.
     *
     * @param componentName free-form component name for unknown components
     * @return span instance, for chaining.
     */
    @Override
    public AbstractTracingSpan setComponent(String componentName) {
        this.componentName = componentName;
        return this;
    }
    // Marks the span as started at an explicit timestamp (milliseconds).
    @Override
    public AbstractSpan start(long startTime) {
        this.startTime = startTime;
        return this;
    }
    /**
     * Converts this span into its protobuf wire representation.
     * Prefers compressed ids (operation/component) over plain names when set.
     */
    public SpanObject.Builder transform() {
        SpanObject.Builder spanBuilder = SpanObject.newBuilder();
        spanBuilder.setSpanId(this.spanId);
        spanBuilder.setParentSpanId(parentSpanId);
        spanBuilder.setStartTime(startTime);
        spanBuilder.setEndTime(endTime);
        // Exactly one of operationId/operationName is meaningful at a time.
        if (operationId != DictionaryUtil.nullValue()) {
            spanBuilder.setOperationNameId(operationId);
        } else {
            spanBuilder.setOperationName(operationName);
        }
        if (isEntry()) {
            spanBuilder.setSpanType(SpanType.Entry);
        } else if (isExit()) {
            spanBuilder.setSpanType(SpanType.Exit);
        } else {
            spanBuilder.setSpanType(SpanType.Local);
        }
        if (this.layer != null) {
            spanBuilder.setSpanLayerValue(this.layer.getCode());
        }
        // NOTE(review): componentId defaults to 0 — this branch assumes 0 equals
        // DictionaryUtil.nullValue(); confirm against DictionaryUtil.
        if (componentId != DictionaryUtil.nullValue()) {
            spanBuilder.setComponentId(componentId);
        } else {
            if (componentName != null) {
                spanBuilder.setComponent(componentName);
            }
        }
        spanBuilder.setIsError(errorOccurred);
        if (this.tags != null) {
            for (KeyValuePair tag : this.tags) {
                spanBuilder.addTags(tag.transform());
            }
        }
        if (this.logs != null) {
            for (LogDataEntity log : this.logs) {
                spanBuilder.addLogs(log.transform());
            }
        }
        if (this.refs != null) {
            for (TraceSegmentRef ref : this.refs) {
                spanBuilder.addRefs(ref.transform());
            }
        }
        return spanBuilder;
    }
    // Links an additional parent segment; duplicates are ignored.
    @Override public void ref(TraceSegmentRef ref) {
        if (refs == null) {
            refs = new LinkedList<TraceSegmentRef>();
        }
        if (!refs.contains(ref)) {
            refs.add(ref);
        }
    }
}
| |
// Copyright 2017 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package adwords.axis.v201809.shoppingcampaigns;
import static com.google.api.ads.common.lib.utils.Builder.DEFAULT_CONFIGURATION_FILENAME;
import com.google.api.ads.adwords.axis.factory.AdWordsServices;
import com.google.api.ads.adwords.axis.utils.v201809.SelectorBuilder;
import com.google.api.ads.adwords.axis.v201809.cm.ApiError;
import com.google.api.ads.adwords.axis.v201809.cm.ApiException;
import com.google.api.ads.adwords.axis.v201809.cm.ConstantDataServiceInterface;
import com.google.api.ads.adwords.axis.v201809.cm.ProductBiddingCategoryData;
import com.google.api.ads.adwords.axis.v201809.cm.Selector;
import com.google.api.ads.adwords.lib.client.AdWordsSession;
import com.google.api.ads.adwords.lib.factory.AdWordsServicesInterface;
import com.google.api.ads.adwords.lib.selectorfields.v201809.cm.ConstantDataField;
import com.google.api.ads.common.lib.auth.OfflineCredentials;
import com.google.api.ads.common.lib.auth.OfflineCredentials.Api;
import com.google.api.ads.common.lib.conf.ConfigurationLoadException;
import com.google.api.ads.common.lib.exception.OAuthException;
import com.google.api.ads.common.lib.exception.ValidationException;
import com.google.api.client.auth.oauth2.Credential;
import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* This example fetches the set of valid ProductBiddingCategories.
*
* <p>Credentials and properties in {@code fromFile()} are pulled from the
* "ads.properties" file. See README for more info.
*/
public class GetProductCategoryTaxonomy {
  /**
   * Entry point: builds credentials and a session from "ads.properties",
   * then runs the example, reporting configuration and API failures.
   */
  public static void main(String[] args) {
    AdWordsSession session;
    try {
      // Generate a refreshable OAuth2 credential.
      Credential oAuth2Credential =
          new OfflineCredentials.Builder()
              .forApi(Api.ADWORDS)
              .fromFile()
              .build()
              .generateCredential();
      // Construct an AdWordsSession.
      session =
          new AdWordsSession.Builder().fromFile().withOAuth2Credential(oAuth2Credential).build();
    } catch (ConfigurationLoadException cle) {
      System.err.printf(
          "Failed to load configuration from the %s file. Exception: %s%n",
          DEFAULT_CONFIGURATION_FILENAME, cle);
      return;
    } catch (ValidationException ve) {
      System.err.printf(
          "Invalid configuration in the %s file. Exception: %s%n",
          DEFAULT_CONFIGURATION_FILENAME, ve);
      return;
    } catch (OAuthException oe) {
      System.err.printf(
          "Failed to create OAuth credentials. Check OAuth settings in the %s file. "
              + "Exception: %s%n",
          DEFAULT_CONFIGURATION_FILENAME, oe);
      return;
    }
    AdWordsServicesInterface adWordsServices = AdWordsServices.getInstance();
    try {
      runExample(adWordsServices, session);
    } catch (ApiException apiException) {
      // ApiException is the base class for most exceptions thrown by an API request. Instances
      // of this exception have a message and a collection of ApiErrors that indicate the
      // type and underlying cause of the exception. Every exception object in the adwords.axis
      // packages will return a meaningful value from toString
      //
      // ApiException extends RemoteException, so this catch block must appear before the
      // catch block for RemoteException.
      System.err.println("Request failed due to ApiException. Underlying ApiErrors:");
      if (apiException.getErrors() != null) {
        int i = 0;
        for (ApiError apiError : apiException.getErrors()) {
          System.err.printf("  Error %d: %s%n", i++, apiError);
        }
      }
    } catch (RemoteException re) {
      System.err.printf(
          "Request failed unexpectedly due to RemoteException: %s%n", re);
    }
  }
  /**
   * Runs the example.
   *
   * @param adWordsServices the services factory.
   * @param session the session.
   * @throws ApiException if the API request failed with one or more service errors.
   * @throws RemoteException if the API request failed due to other errors.
   */
  public static void runExample(AdWordsServicesInterface adWordsServices, AdWordsSession session)
      throws RemoteException {
    // Get the constant data service.
    ConstantDataServiceInterface constantDataService =
        adWordsServices.get(session, ConstantDataServiceInterface.class);
    // Restrict results to the US taxonomy.
    Selector selector = new SelectorBuilder()
        .equals(ConstantDataField.Country, "US")
        .build();
    ProductBiddingCategoryData[] results =
        constantDataService.getProductBiddingCategoryData(selector);
    // List of top level category nodes.
    List<CategoryNode> rootCategories = new ArrayList<>();
    // Map of category ID to category node for all categories found in the results.
    Map<Long, CategoryNode> biddingCategories = Maps.newHashMap();
    // Build the tree in one pass: a parent may be seen before its own entry,
    // in which case a name-less placeholder node is created and filled later.
    for (ProductBiddingCategoryData productBiddingCategoryData : results) {
      Long id = productBiddingCategoryData.getDimensionValue().getValue();
      String name = productBiddingCategoryData.getDisplayValue(0).getValue();
      CategoryNode node = biddingCategories.get(id);
      if (node == null) {
        node = new CategoryNode(id, name);
        biddingCategories.put(id, node);
      } else if (node.name == null) {
        // Ensure that the name attribute for the node is set. Name will be null for nodes added
        // to biddingCategories as a result of being a parentNode below.
        node.name = name;
      }
      if (productBiddingCategoryData.getParentDimensionValue() != null
          && productBiddingCategoryData.getParentDimensionValue().getValue() != null) {
        Long parentId = productBiddingCategoryData.getParentDimensionValue().getValue();
        CategoryNode parentNode = biddingCategories.get(parentId);
        if (parentNode == null) {
          parentNode = new CategoryNode(parentId);
          biddingCategories.put(parentId, parentNode);
        }
        parentNode.children.add(node);
      } else {
        // No parent: this is a top-level category.
        rootCategories.add(node);
      }
    }
    displayCategories(rootCategories, "");
  }
  /**
   * Recursively prints out each category node and its children.
   *
   * @param categories the categories to print.
   * @param prefix the string to print at the beginning of each line of output.
   */
  private static void displayCategories(List<CategoryNode> categories, String prefix) {
    for (CategoryNode category : categories) {
      System.out.printf("%s%s [%s]%n", prefix, category.name, category.id);
      displayCategories(category.children, String.format("%s%s > ", prefix, category.name));
    }
  }
  /**
   * Node that tracks a product bidding category's id, name, and child nodes.
   */
  private static class CategoryNode {
    final Long id;
    // Mutable: may be filled in after construction for placeholder parents.
    String name;
    final List<CategoryNode> children;
    /**
     * Constructor for categories first encountered as non-parent elements in the results.
     *
     * @param id the ID of the category
     * @param name the name of the category
     */
    CategoryNode(Long id, String name) {
      this.children = new ArrayList<>();
      this.id = Preconditions.checkNotNull(id);
      this.name = name;
    }
    /**
     * Constructor for categories first encountered as a parent category, in which case only the ID
     * is available.
     *
     * @param id the ID of the category
     */
    CategoryNode(Long id) {
      this(id, null);
    }
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.blackhole;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ColumnMetadata;
import com.facebook.presto.spi.ConnectorInsertTableHandle;
import com.facebook.presto.spi.ConnectorNewTableLayout;
import com.facebook.presto.spi.ConnectorOutputTableHandle;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.ConnectorTableHandle;
import com.facebook.presto.spi.ConnectorTableLayout;
import com.facebook.presto.spi.ConnectorTableLayoutHandle;
import com.facebook.presto.spi.ConnectorTableLayoutResult;
import com.facebook.presto.spi.ConnectorTableMetadata;
import com.facebook.presto.spi.Constraint;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.SchemaTablePrefix;
import com.facebook.presto.spi.connector.ConnectorMetadata;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import io.airlift.slice.Slice;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import static com.facebook.presto.plugin.blackhole.BlackHoleConnector.DISTRIBUTED_ON;
import static com.facebook.presto.plugin.blackhole.BlackHoleConnector.FIELD_LENGTH_PROPERTY;
import static com.facebook.presto.plugin.blackhole.BlackHoleConnector.PAGES_PER_SPLIT_PROPERTY;
import static com.facebook.presto.plugin.blackhole.BlackHoleConnector.ROWS_PER_PAGE_PROPERTY;
import static com.facebook.presto.plugin.blackhole.BlackHoleConnector.SPLIT_COUNT_PROPERTY;
import static com.facebook.presto.plugin.blackhole.BlackHoleInsertTableHandle.BLACK_HOLE_INSERT_TABLE_HANDLE;
import static com.facebook.presto.plugin.blackhole.Types.checkType;
import static com.facebook.presto.spi.StandardErrorCode.INVALID_TABLE_PROPERTY;
import static com.google.common.base.Preconditions.checkArgument;
import static java.text.MessageFormat.format;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toMap;
import static java.util.stream.Collectors.toSet;
public class BlackHoleMetadata
implements ConnectorMetadata
{
public static final String SCHEMA_NAME = "default";
private final Map<String, BlackHoleTableHandle> tables = new ConcurrentHashMap<>();
    // The black hole connector exposes exactly one fixed schema.
    @Override
    public List<String> listSchemaNames(ConnectorSession session)
    {
        return ImmutableList.of(SCHEMA_NAME);
    }
    // Looks up a table by bare table name; returns null when absent.
    // NOTE(review): the schema part of tableName is ignored here — presumably
    // acceptable because only one schema exists; confirm.
    @Override
    public ConnectorTableHandle getTableHandle(ConnectorSession session, SchemaTableName tableName)
    {
        return tables.get(tableName.getTableName());
    }
@Override
public ConnectorTableMetadata getTableMetadata(ConnectorSession session, ConnectorTableHandle tableHandle)
{
BlackHoleTableHandle blackHoleTableHandle = checkType(tableHandle, BlackHoleTableHandle.class, "tableHandle");
return blackHoleTableHandle.toTableMetadata();
}
@Override
public List<SchemaTableName> listTables(ConnectorSession session, String schemaNameOrNull)
{
if (schemaNameOrNull != null && !schemaNameOrNull.equals(SCHEMA_NAME)) {
return ImmutableList.of();
}
return tables.values().stream()
.map(BlackHoleTableHandle::toSchemaTableName)
.collect(toList());
}
@Override
public Map<String, ColumnHandle> getColumnHandles(ConnectorSession session, ConnectorTableHandle tableHandle)
{
BlackHoleTableHandle blackHoleTableHandle = checkType(tableHandle, BlackHoleTableHandle.class, "tableHandle");
return blackHoleTableHandle.getColumnHandles().stream()
.collect(toMap(BlackHoleColumnHandle::getName, column -> column));
}
@Override
public ColumnMetadata getColumnMetadata(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnHandle columnHandle)
{
BlackHoleColumnHandle blackHoleColumnHandle = checkType(columnHandle, BlackHoleColumnHandle.class, "columnHandle");
return blackHoleColumnHandle.toColumnMetadata();
}
@Override
public Map<SchemaTableName, List<ColumnMetadata>> listTableColumns(ConnectorSession session, SchemaTablePrefix prefix)
{
return tables.values().stream()
.filter(table -> prefix.matches(table.toSchemaTableName()))
.collect(toMap(BlackHoleTableHandle::toSchemaTableName, handle -> handle.toTableMetadata().getColumns()));
}
@Override
public void dropTable(ConnectorSession session, ConnectorTableHandle tableHandle)
{
BlackHoleTableHandle blackHoleTableHandle = checkType(tableHandle, BlackHoleTableHandle.class, "tableHandle");
tables.remove(blackHoleTableHandle.getTableName());
}
@Override
public void renameTable(ConnectorSession session, ConnectorTableHandle tableHandle, SchemaTableName newTableName)
{
BlackHoleTableHandle oldTableHandle = checkType(tableHandle, BlackHoleTableHandle.class, "tableHandle");
BlackHoleTableHandle newTableHandle = new BlackHoleTableHandle(
oldTableHandle.getSchemaName(),
newTableName.getTableName(),
oldTableHandle.getColumnHandles(),
oldTableHandle.getSplitCount(),
oldTableHandle.getPagesPerSplit(),
oldTableHandle.getRowsPerPage(),
oldTableHandle.getFieldsLength()
);
tables.remove(oldTableHandle.getTableName());
tables.put(newTableName.getTableName(), newTableHandle);
}
@Override
public void createTable(ConnectorSession session, ConnectorTableMetadata tableMetadata)
{
    // Plain CREATE TABLE is modelled as a CTAS with no data fragments and no
    // layout hint.
    finishCreateTable(session, beginCreateTable(session, tableMetadata, Optional.empty()), ImmutableList.of());
}
@Override
public Optional<ConnectorNewTableLayout> getNewTableLayout(ConnectorSession connectorSession, ConnectorTableMetadata tableMetadata)
{
    // The "distributed_on" table property lists the requested bucketing
    // columns; an empty list means no distribution was requested.
    List<String> distributeColumns = (List<String>) tableMetadata.getProperties().get(DISTRIBUTED_ON);
    if (distributeColumns.isEmpty()) {
        return Optional.empty();
    }
    // Every distribution column must actually be declared on the table.
    Set<String> declaredColumns = tableMetadata.getColumns().stream()
            .map(ColumnMetadata::getName)
            .collect(toSet());
    Set<String> undefinedColumns = Sets.difference(ImmutableSet.copyOf(distributeColumns), declaredColumns);
    if (!undefinedColumns.isEmpty()) {
        throw new PrestoException(INVALID_TABLE_PROPERTY, "Distribute columns not defined on table: " + undefinedColumns);
    }
    return Optional.of(new ConnectorNewTableLayout(BlackHolePartitioningHandle.INSTANCE, distributeColumns));
}
@Override
public ConnectorOutputTableHandle beginCreateTable(ConnectorSession session, ConnectorTableMetadata tableMetadata, Optional<ConnectorNewTableLayout> layout)
{
    // Data-generation knobs supplied as table properties (all have defaults).
    int splitCount = (Integer) tableMetadata.getProperties().get(SPLIT_COUNT_PROPERTY);
    int pagesPerSplit = (Integer) tableMetadata.getProperties().get(PAGES_PER_SPLIT_PROPERTY);
    int rowsPerPage = (Integer) tableMetadata.getProperties().get(ROWS_PER_PAGE_PROPERTY);
    int fieldsLength = (Integer) tableMetadata.getProperties().get(FIELD_LENGTH_PROPERTY);
    if (splitCount < 0) {
        throw new PrestoException(INVALID_TABLE_PROPERTY, SPLIT_COUNT_PROPERTY + " property is negative");
    }
    if (pagesPerSplit < 0) {
        throw new PrestoException(INVALID_TABLE_PROPERTY, PAGES_PER_SPLIT_PROPERTY + " property is negative");
    }
    if (rowsPerPage < 0) {
        throw new PrestoException(INVALID_TABLE_PROPERTY, ROWS_PER_PAGE_PROPERTY + " property is negative");
    }
    // Fix: fieldsLength was the only property not checked for negative values;
    // validate it like its siblings.
    if (fieldsLength < 0) {
        throw new PrestoException(INVALID_TABLE_PROPERTY, FIELD_LENGTH_PROPERTY + " property is negative");
    }
    // The three sizing properties are all-or-nothing: either the table
    // produces no data at all, or all three must be specified.
    if (((splitCount > 0) || (pagesPerSplit > 0) || (rowsPerPage > 0)) &&
            ((splitCount == 0) || (pagesPerSplit == 0) || (rowsPerPage == 0))) {
        throw new PrestoException(INVALID_TABLE_PROPERTY, format("All properties [%s, %s, %s] must be set if any are set",
                SPLIT_COUNT_PROPERTY, PAGES_PER_SPLIT_PROPERTY, ROWS_PER_PAGE_PROPERTY));
    }
    return new BlackHoleOutputTableHandle(new BlackHoleTableHandle(
            tableMetadata,
            splitCount,
            pagesPerSplit,
            rowsPerPage,
            fieldsLength));
}
@Override
public void finishCreateTable(ConnectorSession session, ConnectorOutputTableHandle tableHandle, Collection<Slice> fragments)
{
    // Register the now-complete table in the in-memory catalog; any written
    // fragments are simply discarded (this is a black hole).
    BlackHoleOutputTableHandle outputHandle = checkType(tableHandle, BlackHoleOutputTableHandle.class, "tableHandle");
    BlackHoleTableHandle created = outputHandle.getTable();
    tables.put(created.getTableName(), created);
}
@Override
public ConnectorInsertTableHandle beginInsert(ConnectorSession session, ConnectorTableHandle tableHandle)
{
    // All inserts share one stateless handle: inserted data is discarded, so
    // there is nothing table-specific to track.
    return BLACK_HOLE_INSERT_TABLE_HANDLE;
}
@Override
// No-op: the black hole connector drops inserted rows, so there is nothing to finalize.
public void finishInsert(ConnectorSession session, ConnectorInsertTableHandle insertHandle, Collection<Slice> fragments) {}
@Override
public List<ConnectorTableLayoutResult> getTableLayouts(
        ConnectorSession session,
        ConnectorTableHandle handle,
        Constraint<ColumnHandle> constraint,
        Optional<Set<ColumnHandle>> desiredColumns)
{
    requireNonNull(handle, "handle is null");
    checkArgument(handle instanceof BlackHoleTableHandle);
    BlackHoleTableHandle table = (BlackHoleTableHandle) handle;
    // The layout only needs to carry the synthetic data-generation settings;
    // the constraint is not used for pruning, so it is reported unenforced.
    BlackHoleTableLayoutHandle layout = new BlackHoleTableLayoutHandle(
            table.getSplitCount(),
            table.getPagesPerSplit(),
            table.getRowsPerPage(),
            table.getFieldsLength());
    return ImmutableList.of(new ConnectorTableLayoutResult(getTableLayout(session, layout), TupleDomain.all()));
}
@Override
public ConnectorTableLayout getTableLayout(ConnectorSession session, ConnectorTableLayoutHandle handle)
{
    // NOTE(review): the layout predicate here is TupleDomain.none(), while the
    // sibling getTableLayouts reports TupleDomain.all() as the unenforced
    // constraint — confirm this combination is intended.
    return new ConnectorTableLayout(
            handle,
            Optional.empty(),
            TupleDomain.none(),
            Optional.empty(),
            Optional.empty(),
            Optional.empty(),
            ImmutableList.of());
}
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package sql.generic.ddl;
import hydra.Log;
import hydra.MasterController;
import java.math.BigDecimal;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.HashMap;
import java.util.List;
import sql.generic.SQLOldTest;
import sql.SQLHelper;
import sql.generic.dmlstatements.GenericDML;
import util.TestException;
import util.TestHelper;
/**
 * Executor
 *
 * Runs SQL statements and queries against a GemFireXD connection (gConn) and,
 * when the test is configured with a Derby oracle, mirrors them on a Derby
 * connection (dConn). Exception handling is delegated to a
 * GenericExceptionHandler so that it is applied consistently in one place.
 *
 * @author Namrata Thanvi
 */
public class Executor {

  // gConn: GemFireXD connection (always set); dConn: optional Derby oracle
  // connection (null when the test runs without a Derby verifier).
  Connection gConn, dConn;

  public Connection getGConn() {
    return gConn;
  }

  public void setGConn(Connection conn) {
    gConn = conn;
  }

  public Connection getDConn() {
    return dConn;
  }

  public void setDConn(Connection conn) {
    dConn = conn;
  }

  // Prepared statement shared by the parameterized execute/executeQuery
  // overloads; its parameters are bound via setValues(). Not thread-safe.
  PreparedStatement ps;

  /** Keys for the per-database exception maps returned by executeOnBothDb. */
  public enum ExceptionMapKeys {
    DERBY, GEMXD
  };

  /** Which database (if any) an execution failed on. */
  public enum ExceptionAt {
    DERBY, GEMXD, NONE
  };

  /** GemFireXD-only executor (no Derby oracle). */
  public Executor(Connection conn) {
    this.gConn = conn;
    this.dConn = null;
  }

  /** Executor that mirrors every operation on Derby for validation. */
  public Executor(Connection dConn, Connection gConn) {
    this.dConn = dConn;
    this.gConn = gConn;
  }

  // Trying to create something generic with proper exception handling at one
  // place.
  /**
   * Executes the given update statements on Derby first, then on GemFireXD.
   * The exception handler decides whether a Derby failure aborts the
   * GemFireXD half.
   *
   * @return which database raised an unrecoverable exception, or NONE
   */
  public ExceptionAt executeOnBothDb(String derby, String gemxd,
      GenericExceptionHandler exceptionHandler) {
    boolean continueWithGemxd = true;
    try {
      dConn.createStatement().executeUpdate(derby);
      Log.getLogWriter().info("Derby - Executed " + derby);
    } catch (SQLException se) {
      Log.getLogWriter().info("Derby Exception - " + se.getMessage());
      continueWithGemxd = exceptionHandler.handleDerbyException(dConn, se);
    }
    exceptionHandler.afterDerbyExecution();
    if (!continueWithGemxd)
      return ExceptionAt.DERBY;
    try {
      gConn.createStatement().executeUpdate(gemxd);
      Log.getLogWriter().info("Gemxd - Executed " + gemxd);
    } catch (SQLException se) {
      Log.getLogWriter().info("Gemxd Exception - " + se.getMessage());
      if (exceptionHandler.handleGfxdException(se) == false) {
        return ExceptionAt.GEMXD;
      }
    }
    exceptionHandler.afterGemxdExecution();
    return ExceptionAt.NONE;
  }

  // executeQueryOnbothDb - new implementation
  /**
   * Runs both queries, Derby first. rs[0]/success[0] carry the Derby result,
   * rs[1]/success[1] the GemFireXD result. GemFireXD is only attempted when
   * Derby produced a non-null result set.
   */
  public ResultSet[] executeQueryOnBothDB(String queryDerby, String queryGemxd,
      int maxNumOfRetryDerby, int maxNumOfRetryGemxd, int retrySleepMs,
      GenericExceptionHandler exceptionHandler, boolean[] success) {
    ResultSet[] rs = new ResultSet[2];
    try {
      rs[0] = executeQuery(queryDerby, maxNumOfRetryDerby, retrySleepMs,
          "Derby");
      success[0] = true;
      exceptionHandler.afterDerbyExecution();
    } catch (SQLException se) {
      exceptionHandler.handleDerbyException(dConn, se);
    }
    if (rs[0] != null) {
      try {
        rs[1] = executeQuery(queryGemxd, maxNumOfRetryGemxd, retrySleepMs,
            "Gemxd");
        success[1] = true;
        if (rs[1] == null) {
          success[1] = false;
          handleNullRs();
        }
        exceptionHandler.afterGemxdExecution();
      } catch (SQLException se) {
        exceptionHandler.handleGfxdException(se);
      }
    }
    return rs;
  }

  /**
   * Runs the query against GemFireXD only. A null result set is tolerated in
   * HA or critical-heap test modes (see handleNullRs).
   */
  public ResultSet executeQueyOnGfxdOnly(String query, int maxNumOfTry,
      int sleep, GenericExceptionHandler exceptionHandler) {
    ResultSet rs = null;
    try {
      rs = executeQuery(query, maxNumOfTry, sleep, "Gfxd");
      if (rs == null) {
        handleNullRs();
      }
    } catch (SQLException se) {
      exceptionHandler.handleGfxdExceptionOnly(se);
    }
    return rs;
  }

  /**
   * Runs the query after a random sleep, on the connection matching the
   * requested database: "Derby" uses dConn, anything else uses gConn.
   *
   * Fix: this previously prepared the statement on dConn regardless of db, so
   * "Gemxd"/"Gfxd" queries ran against Derby and NPE'd when no Derby
   * connection was configured.
   *
   * NOTE(review): success is set unconditionally after one attempt and any
   * SQLException propagates to the caller, so the retry loop never actually
   * retries; preserved as-is to avoid changing test semantics — confirm
   * whether lock-conflict exceptions were meant to be caught here.
   *
   * The prepared statement is intentionally not closed here: closing it would
   * also close the ResultSet being returned.
   */
  public ResultSet executeQuery(String query, int maxNumOfTry, int sleep,
      String db) throws SQLException {
    Connection conn = "Derby".equalsIgnoreCase(db) ? dConn : gConn;
    int count = 0;
    boolean success = false;
    ResultSet rs = null;
    while (count < maxNumOfTry && !success) {
      count++;
      MasterController.sleepForMs(GenericDML.rand.nextInt(sleep));
      PreparedStatement ps = conn.prepareStatement(query);
      rs = ps.executeQuery();
      success = true;
    }
    if (count >= maxNumOfTry && !success) {
      Log.getLogWriter().info(
          "Could not get the lock to finish the operation in " + db
              + ", abort this operation");
    }
    return rs;
  }

  /**
   * Fails the test on a missing GemFireXD result set unless the test mode
   * (HA or critical heap) makes a missing result legitimate.
   */
  public void handleNullRs() {
    if (SQLOldTest.isHATest) {
      Log.getLogWriter().info("Testing HA and did not get GFXD result set");
    }
    else if (SQLOldTest.setCriticalHeap) {
      Log.getLogWriter().info("got XCL54 and does not get query result");
    }
    else
      throw new TestException("Not able to get gfe result set after retry");
  }

  /**
   * Executes an update statement on GemFireXD only, routing any SQL warning
   * or exception through the supplied handler.
   *
   * @return false when the handler reports a failure, true otherwise
   */
  public boolean executeOnGfxdOnly(String executionString,
      GenericExceptionHandler exceptionHandler) {
    try {
      Statement stmt = gConn.createStatement();
      stmt.executeUpdate(executionString);
      Log.getLogWriter().info(" Completed Execution of ... " + executionString);
      SQLWarning warning = stmt.getWarnings(); // test to see there is a warning
      if (warning != null) {
        SQLHelper.printSQLWarning(warning);
        return exceptionHandler.handleGfxdWarningsOnly(warning);
      }
    } catch (SQLException se) {
      exceptionHandler.handleGfxdExceptionOnly(se);
      return false;
    }
    return true;
  }

  /** Executes a statement on GemFireXD, propagating any SQLException. */
  public void execute(String executionString) throws SQLException {
    gConn.createStatement().execute(executionString);
  }

  /** Runs a query on GemFireXD, propagating any SQLException. */
  public ResultSet executeQuery(String executionString) throws SQLException {
    return gConn.createStatement().executeQuery(executionString);
  }

  /**
   * Executes a parameterized update on GemFireXD, binding the parameters in
   * list order via setValues.
   *
   * @return the update count
   */
  public int execute(String executionString, List<Object> parameters)
      throws SQLException {
    int columnIndex = 1;
    ps = gConn.prepareStatement(executionString);
    for (Object param : parameters) {
      setValues(param, columnIndex++);
    }
    return ps.executeUpdate();
  }

  /**
   * Executes a statement on GemFireXD only, capturing (not throwing) any
   * SQLException under the GEMXD key.
   */
  public HashMap<ExceptionMapKeys, SQLException> executeOnlyOnGFXD(
      String executionString) {
    HashMap<ExceptionMapKeys, SQLException> exceptionMap = new HashMap<ExceptionMapKeys, SQLException>();
    try {
      gConn.createStatement().execute(executionString);
    } catch (SQLException se) {
      exceptionMap.put(ExceptionMapKeys.GEMXD, se);
    }
    return exceptionMap;
  }

  /**
   * Executes the same statement on Derby (when configured) and on GemFireXD,
   * capturing each database's SQLException (if any) under its map key.
   */
  public HashMap<ExceptionMapKeys, SQLException> executeOnBothDb(
      String executionString) {
    HashMap<ExceptionMapKeys, SQLException> exceptionMap = new HashMap<ExceptionMapKeys, SQLException>();
    if (SQLOldTest.hasDerbyServer) {
      try {
        dConn.createStatement().execute(executionString);
      } catch (SQLException se) {
        exceptionMap.put(ExceptionMapKeys.DERBY, se);
      }
    }
    try {
      gConn.createStatement().execute(executionString);
    } catch (SQLException se) {
      exceptionMap.put(ExceptionMapKeys.GEMXD, se);
    }
    return exceptionMap;
  }

  /**
   * Runs a parameterized query on GemFireXD, binding the parameters in list
   * order via setValues.
   */
  public ResultSet executeQuery(String executionString, List<Object> parameters)
      throws SQLException {
    int columnIndex = 1;
    ps = gConn.prepareStatement(executionString);
    for (Object param : parameters) {
      setValues(param, columnIndex++);
    }
    return ps.executeQuery();
  }

  /** @return the GemFireXD connection */
  public Connection getConnection() {
    return gConn;
  }

  /**
   * Binds value to the shared prepared statement at the given index,
   * dispatching on its runtime type. Unrecognized types are logged and
   * skipped.
   *
   * Fix: the byte-array branch previously tested {@code instanceof Byte[]}
   * but cast to {@code byte[]}; a primitive byte[] never matched the test and
   * a boxed Byte[] would have thrown ClassCastException on the cast. It now
   * matches and binds primitive byte arrays.
   */
  public void setValues(Object value, int columnIndex) throws SQLException {
    if (value instanceof Integer) {
      ps.setInt(columnIndex, (Integer)value);
    }
    else if (value instanceof Long) {
      ps.setLong(columnIndex, (Long)value);
    }
    else if (value instanceof Float) {
      ps.setFloat(columnIndex, (Float)value);
    }
    else if (value instanceof Double) {
      ps.setDouble(columnIndex, (Double)value);
    }
    else if (value instanceof BigDecimal) {
      ps.setBigDecimal(columnIndex, (BigDecimal)value);
    }
    else if (value instanceof Boolean) {
      ps.setBoolean(columnIndex, (Boolean)value);
    }
    else if (value instanceof Blob) {
      ps.setBlob(columnIndex, (Blob)value);
    }
    else if (value instanceof String) {
      ps.setString(columnIndex, (String)value);
    }
    else if (value instanceof Date) {
      ps.setDate(columnIndex, (Date)value);
    }
    else if (value instanceof byte[]) {
      ps.setBytes(columnIndex, (byte[])value);
    }
    else if (value instanceof Short) {
      ps.setShort(columnIndex, (Short)value);
    }
    else if (value instanceof Time) {
      ps.setTime(columnIndex, (Time)value);
    }
    else if (value instanceof Timestamp) {
      ps.setTimestamp(columnIndex, (Timestamp)value);
    }
    else if (value instanceof Clob) {
      ps.setClob(columnIndex, (Clob)value);
    }
    else {
      Log.getLogWriter().info(
          "This dataType is yet not supported : " + value.getClass()
              + " for object " + value);
    }
  }

  /** Rolls back Derby (when configured) and then GemFireXD. */
  public void rollback() {
    if (dConn != null)
      rollbackDerby();
    rollbackGfxd();
  }

  /** Commits Derby (when configured) and then GemFireXD. */
  public void commit() {
    if (dConn != null)
      commitDerby();
    commitGfxd();
  }

  /** Commits the Derby connection, converting failures into TestException. */
  public void commitDerby() {
    try {
      Log.getLogWriter().info(" Derby - Commit started ");
      dConn.commit();
      Log.getLogWriter().info(" Derby - Commit Completed ");
    } catch (SQLException se) {
      throw new TestException(" Error while commiting the derby database "
          + " sqlState : " + se.getSQLState() + " error message : "
          + se.getMessage() + TestHelper.getStackTrace(se));
    }
  }

  /** Commits the GemFireXD connection, converting failures into TestException. */
  public void commitGfxd() {
    try {
      Log.getLogWriter().info(" Gfxd - Commit started ");
      gConn.commit();
      Log.getLogWriter().info(" Gfxd - Commit Completed ");
    } catch (SQLException se) {
      throw new TestException(" Error while commiting the Gfxd database "
          + " sqlState : " + se.getSQLState() + " error message : "
          + se.getMessage() + TestHelper.getStackTrace(se));
    }
  }

  /** Rolls back the Derby connection, converting failures into TestException. */
  public void rollbackDerby() {
    try {
      Log.getLogWriter().info(" Derby - Rollback started ");
      dConn.rollback();
      Log.getLogWriter().info(" Derby - Rollback Completed ");
    } catch (SQLException se) {
      throw new TestException(" Error while doing rollback derby database "
          + " sqlState : " + se.getSQLState() + " error message : "
          + se.getMessage() + TestHelper.getStackTrace(se));
    }
  }

  /** Rolls back the GemFireXD connection, converting failures into TestException. */
  public void rollbackGfxd() {
    try {
      Log.getLogWriter().info(" Gfxd - Rollback started ");
      gConn.rollback();
      Log.getLogWriter().info(" Gfxd - Rollback Completed ");
    } catch (SQLException se) {
      throw new TestException(" Error while doing rollback Gfxd database "
          + " sqlState : " + se.getSQLState() + " error message : "
          + se.getMessage() + TestHelper.getStackTrace(se));
    }
  }
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.bazel.rules.java;
import static com.google.common.base.Strings.isNullOrEmpty;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.RuleConfiguredTarget.Mode;
import com.google.devtools.build.lib.analysis.RuleConfiguredTargetBuilder;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.Runfiles;
import com.google.devtools.build.lib.analysis.RunfilesProvider;
import com.google.devtools.build.lib.analysis.TransitiveInfoCollection;
import com.google.devtools.build.lib.analysis.actions.CustomCommandLine;
import com.google.devtools.build.lib.analysis.actions.TemplateExpansionAction;
import com.google.devtools.build.lib.analysis.actions.TemplateExpansionAction.ComputedSubstitution;
import com.google.devtools.build.lib.analysis.actions.TemplateExpansionAction.Substitution;
import com.google.devtools.build.lib.analysis.actions.TemplateExpansionAction.Template;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.packages.BuildType;
import com.google.devtools.build.lib.rules.java.DeployArchiveBuilder;
import com.google.devtools.build.lib.rules.java.DeployArchiveBuilder.Compression;
import com.google.devtools.build.lib.rules.java.JavaCommon;
import com.google.devtools.build.lib.rules.java.JavaCompilationArtifacts;
import com.google.devtools.build.lib.rules.java.JavaCompilationHelper;
import com.google.devtools.build.lib.rules.java.JavaConfiguration;
import com.google.devtools.build.lib.rules.java.JavaHelper;
import com.google.devtools.build.lib.rules.java.JavaRunfilesProvider;
import com.google.devtools.build.lib.rules.java.JavaSemantics;
import com.google.devtools.build.lib.rules.java.JavaTargetAttributes;
import com.google.devtools.build.lib.rules.java.JavaUtil;
import com.google.devtools.build.lib.rules.java.Jvm;
import com.google.devtools.build.lib.syntax.Type;
import com.google.devtools.build.lib.util.Preconditions;
import com.google.devtools.build.lib.util.ShellEscaper;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
 * Semantics for Bazel Java rules.
 *
 * <p>Stateless singleton ({@link #INSTANCE}); several hooks are intentionally
 * no-ops in the Bazel (as opposed to Google-internal) build.
 */
public class BazelJavaSemantics implements JavaSemantics {

  public static final BazelJavaSemantics INSTANCE = new BazelJavaSemantics();

  // Template for the wrapper shell script that launches the JVM.
  private static final Template STUB_SCRIPT =
      Template.forResource(BazelJavaSemantics.class, "java_stub_template.txt");
  private static final String JAVABUILDER_CLASS_NAME =
      "com.google.devtools.build.buildjar.BazelJavaBuilder";

  private static final Label JDK_LAUNCHER_LABEL =
      Label.parseAbsoluteUnchecked("@bazel_tools//third_party/java/jdk:jdk_launcher");

  // Singleton: use INSTANCE.
  private BazelJavaSemantics() {
  }

  @Override
  public Label getJdkLauncherLabel() {
    return JDK_LAUNCHER_LABEL;
  }

  private boolean isJavaBinaryOrJavaTest(RuleContext ruleContext) {
    String ruleClass = ruleContext.getRule().getRuleClass();
    return ruleClass.equals("java_binary") || ruleClass.equals("java_test");
  }

  // No Bazel-specific rule validation.
  @Override
  public void checkRule(RuleContext ruleContext, JavaCommon javaCommon) {
  }

  /**
   * Computes the class to launch: null when no executable is created,
   * BazelTestRunner for testrunner-based tests (new test semantics),
   * otherwise the main_class attribute or the primary class derived from the
   * sources.
   */
  private String getMainClassInternal(RuleContext ruleContext, ImmutableList<Artifact> sources) {
    if (!ruleContext.attributes().get("create_executable", Type.BOOLEAN)) {
      return null;
    }
    if (ruleContext.attributes().get("use_testrunner", Type.BOOLEAN)
        && !useLegacyJavaTest(ruleContext)) {
      return "com.google.testing.junit.runner.BazelTestRunner";
    }
    String mainClass = ruleContext.attributes().get("main_class", Type.STRING);
    if (mainClass.isEmpty()) {
      mainClass = JavaCommon.determinePrimaryClass(ruleContext, sources);
    }
    return mainClass;
  }

  // Reports rule errors when the main class and create_executable settings
  // are inconsistent or no main class can be determined.
  private void checkMainClass(RuleContext ruleContext, ImmutableList<Artifact> sources) {
    boolean createExecutable = ruleContext.attributes().get("create_executable", Type.BOOLEAN);
    String mainClass = getMainClassInternal(ruleContext, sources);

    if (!createExecutable && !isNullOrEmpty(mainClass)) {
      ruleContext.ruleError("main class must not be specified when executable is not created");
    }

    if (createExecutable && isNullOrEmpty(mainClass)) {
      if (sources.isEmpty()) {
        ruleContext.ruleError("need at least one of 'main_class' or Java source files");
      }
      mainClass = JavaCommon.determinePrimaryClass(ruleContext, sources);
      if (mainClass == null) {
        ruleContext.ruleError("cannot determine main class for launching "
            + "(found neither a source file '" + ruleContext.getTarget().getName()
            + ".java', nor a main_class attribute, and package name "
            + "doesn't include 'java' or 'javatests')");
      }
    }
  }

  @Override
  public String getMainClass(RuleContext ruleContext, ImmutableList<Artifact> sources) {
    checkMainClass(ruleContext, sources);
    return getMainClassInternal(ruleContext, sources);
  }

  @Override
  public ImmutableList<Artifact> collectResources(RuleContext ruleContext) {
    if (!ruleContext.getRule().isAttrDefined("resources", BuildType.LABEL_LIST)) {
      return ImmutableList.of();
    }

    return ruleContext.getPrerequisiteArtifacts("resources", Mode.TARGET).list();
  }

  // Bazel does not produce instrumentation metadata here.
  @Override
  public Artifact createInstrumentationMetadataArtifact(
      RuleContext ruleContext, Artifact outputJar) {
    return null;
  }

  // No extra java command-line arguments in Bazel.
  @Override
  public void buildJavaCommandLine(Collection<Artifact> outputs, BuildConfiguration configuration,
      CustomCommandLine.Builder result) {
  }

  /**
   * Expands the stub-script template into the executable wrapper, filling in
   * the workspace prefix, the java binary path, whether runfiles are needed
   * (relative java executable), the runtime classpath, the start class, and
   * the JVM flags.
   */
  @Override
  public void createStubAction(RuleContext ruleContext, final JavaCommon javaCommon,
      List<String> jvmFlags, Artifact executable, String javaStartClass,
      String javaExecutable) {
    Preconditions.checkState(ruleContext.getConfiguration().hasFragment(Jvm.class));

    Preconditions.checkNotNull(jvmFlags);
    Preconditions.checkNotNull(executable);
    Preconditions.checkNotNull(javaStartClass);
    Preconditions.checkNotNull(javaExecutable);

    List<Substitution> arguments = new ArrayList<>();
    String workspacePrefix = ruleContext.getWorkspaceName();
    if (!workspacePrefix.isEmpty()) {
      workspacePrefix += "/";
    }
    arguments.add(Substitution.of("%workspace_prefix%", workspacePrefix));
    arguments.add(Substitution.of("%javabin%", javaExecutable));
    arguments.add(Substitution.of("%needs_runfiles%",
        ruleContext.getFragment(Jvm.class).getJavaExecutable().isAbsolute() ? "0" : "1"));
    // Computed lazily: the runtime classpath may not be final yet.
    arguments.add(new ComputedSubstitution("%classpath%") {
      @Override
      public String getValue() {
        StringBuilder buffer = new StringBuilder();
        Iterable<Artifact> jars = javaCommon.getRuntimeClasspath();
        appendRunfilesRelativeEntries(buffer, jars, ':');
        return buffer.toString();
      }
    });

    arguments.add(Substitution.of("%java_start_class%",
        ShellEscaper.escapeString(javaStartClass)));
    arguments.add(Substitution.ofSpaceSeparatedList("%jvm_flags%", ImmutableList.copyOf(jvmFlags)));

    ruleContext.registerAction(new TemplateExpansionAction(
        ruleContext.getActionOwner(), executable, STUB_SCRIPT, arguments, true));
  }

  /**
   * Builds a class path by concatenating the root relative paths of the artifacts separated by the
   * delimiter. Each relative path entry is prepended with "${RUNPATH}" which will be expanded by
   * the stub script at runtime, to either "${JAVA_RUNFILES}/" or if we are lucky, the empty
   * string.
   *
   * @param buffer the buffer to use for concatenating the entries
   * @param artifacts the entries to concatenate in the buffer
   * @param delimiter the delimiter character to separate the entries
   */
  private static void appendRunfilesRelativeEntries(StringBuilder buffer,
      Iterable<Artifact> artifacts, char delimiter) {
    for (Artifact artifact : artifacts) {
      if (buffer.length() > 0) {
        buffer.append(delimiter);
      }
      buffer.append("${RUNPATH}");
      buffer.append(artifact.getRootRelativePath().getPathString());
    }
  }

  /**
   * Returns the $testsupport prerequisite for executable, testrunner-based
   * java_binary/java_test rules under the new test semantics; null otherwise.
   */
  private TransitiveInfoCollection getTestSupport(RuleContext ruleContext) {
    if (!isJavaBinaryOrJavaTest(ruleContext)) {
      return null;
    }
    if (useLegacyJavaTest(ruleContext)) {
      return null;
    }

    boolean createExecutable = ruleContext.attributes().get("create_executable", Type.BOOLEAN);
    if (createExecutable && ruleContext.attributes().get("use_testrunner", Type.BOOLEAN)) {
      return Iterables.getOnlyElement(ruleContext.getPrerequisites("$testsupport", Mode.TARGET));
    } else {
      return null;
    }
  }

  @Override
  public void addRunfilesForBinary(RuleContext ruleContext, Artifact launcher,
      Runfiles.Builder runfilesBuilder) {
    TransitiveInfoCollection testSupport = getTestSupport(ruleContext);
    if (testSupport != null) {
      runfilesBuilder.addTarget(testSupport, JavaRunfilesProvider.TO_RUNFILES);
      runfilesBuilder.addTarget(testSupport, RunfilesProvider.DEFAULT_RUNFILES);
    }
  }

  // No library-specific runfiles in Bazel.
  @Override
  public void addRunfilesForLibrary(RuleContext ruleContext, Runfiles.Builder runfilesBuilder) {
  }

  @Override
  public void collectTargetsTreatedAsDeps(
      RuleContext ruleContext, ImmutableList.Builder<TransitiveInfoCollection> builder) {
    TransitiveInfoCollection testSupport = getTestSupport(ruleContext);
    if (testSupport != null) {
      // TODO(bazel-team): The testsupport is used as the test framework
      // and really only needs to be on the runtime, not compile-time
      // classpath.
      builder.add(testSupport);
    }
  }

  // No extra javac options in Bazel.
  @Override
  public Iterable<String> getExtraJavacOpts(RuleContext ruleContext) {
    return ImmutableList.<String>of();
  }

  // No extra providers in Bazel.
  @Override
  public void addProviders(RuleContext ruleContext,
      JavaCommon javaCommon,
      List<String> jvmFlags,
      Artifact classJar,
      Artifact srcJar,
      Artifact genJar,
      Artifact gensrcJar,
      ImmutableMap<Artifact, Artifact> compilationToRuntimeJarMap,
      NestedSetBuilder<Artifact> filesBuilder,
      RuleConfiguredTargetBuilder ruleBuilder) {
  }

  // TODO(dmarting): simplify that logic when we remove the legacy Bazel java_test behavior.
  private String getPrimaryClassLegacy(RuleContext ruleContext, ImmutableList<Artifact> sources) {
    boolean createExecutable = ruleContext.attributes().get("create_executable", Type.BOOLEAN);
    if (!createExecutable) {
      return null;
    }
    return getMainClassInternal(ruleContext, sources);
  }

  /**
   * New test semantics: with use_testrunner the primary class is the test
   * class (explicit test_class attribute or derived from the sources);
   * without it, test_class must be unset and the main class is used.
   */
  private String getPrimaryClassNew(RuleContext ruleContext, ImmutableList<Artifact> sources) {
    boolean createExecutable = ruleContext.attributes().get("create_executable", Type.BOOLEAN);

    if (!createExecutable) {
      return null;
    }

    boolean useTestrunner = ruleContext.attributes().get("use_testrunner", Type.BOOLEAN);

    String testClass = ruleContext.getRule().isAttrDefined("test_class", Type.STRING)
        ? ruleContext.attributes().get("test_class", Type.STRING) : "";

    if (useTestrunner) {
      if (testClass.isEmpty()) {
        testClass = JavaCommon.determinePrimaryClass(ruleContext, sources);
        if (testClass == null) {
          ruleContext.ruleError("cannot determine junit.framework.Test class "
                    + "(Found no source file '" + ruleContext.getTarget().getName()
                    + ".java' and package name doesn't include 'java' or 'javatests'. "
                    + "You might want to rename the rule or add a 'test_class' "
                    + "attribute.)");
        }
      }
      return testClass;
    } else {
      if (!testClass.isEmpty()) {
        ruleContext.attributeError("test_class", "this attribute is only meaningful to "
            + "BazelTestRunner, but you are not using it (use_testrunner = 0)");
      }
      return getMainClassInternal(ruleContext, sources);
    }
  }

  @Override
  public String getPrimaryClass(RuleContext ruleContext, ImmutableList<Artifact> sources) {
    return useLegacyJavaTest(ruleContext)
        ? getPrimaryClassLegacy(ruleContext, sources)
        : getPrimaryClassNew(ruleContext, sources);
  }

  @Override
  public Iterable<String> getJvmFlags(
      RuleContext ruleContext, ImmutableList<Artifact> sources, List<String> userJvmFlags) {
    ImmutableList.Builder<String> jvmFlags = ImmutableList.builder();
    jvmFlags.addAll(userJvmFlags);

    if (!useLegacyJavaTest(ruleContext)) {
      if (ruleContext.attributes().get("use_testrunner", Type.BOOLEAN)) {
        String testClass = ruleContext.getRule().isAttrDefined("test_class", Type.STRING)
            ? ruleContext.attributes().get("test_class", Type.STRING) : "";
        if (testClass.isEmpty()) {
          testClass = JavaCommon.determinePrimaryClass(ruleContext, sources);
        }

        if (testClass == null) {
          ruleContext.ruleError("cannot determine test class");
        } else {
          // Always run junit tests with -ea (enable assertion)
          jvmFlags.add("-ea");
          // "suite" is a misnomer.
          jvmFlags.add("-Dbazel.test_suite=" + ShellEscaper.escapeString(testClass));
        }
      }
    }

    return jvmFlags.build();
  }

  // Bazel adds no coverage instrumentation here; the main class is unchanged.
  @Override
  public String addCoverageSupport(JavaCompilationHelper helper,
      JavaTargetAttributes.Builder attributes,
      Artifact executable, Artifact instrumentationMetadata,
      JavaCompilationArtifacts.Builder javaArtifactsBuilder, String mainClass) {
    return mainClass;
  }

  @Override
  public CustomCommandLine buildSingleJarCommandLine(BuildConfiguration configuration,
      Artifact output, String mainClass, ImmutableList<String> manifestLines,
      Iterable<Artifact> buildInfoFiles, ImmutableList<Artifact> resources,
      Iterable<Artifact> classpath, boolean includeBuildData,
      Compression compression, Artifact launcher) {
    return DeployArchiveBuilder.defaultSingleJarCommandLine(output, mainClass, manifestLines,
        buildInfoFiles, resources, classpath, includeBuildData, compression, launcher).build();
  }

  // Message translation is not supported in Bazel.
  @Override
  public ImmutableList<Artifact> translate(RuleContext ruleContext, JavaConfiguration javaConfig,
      List<Artifact> messages) {
    return ImmutableList.<Artifact>of();
  }

  @Override
  public Artifact getLauncher(RuleContext ruleContext, JavaCommon common,
      DeployArchiveBuilder deployArchiveBuilder, Runfiles.Builder runfilesBuilder,
      List<String> jvmFlags, JavaTargetAttributes.Builder attributesBuilder, boolean shouldStrip) {
    return JavaHelper.launcherArtifactForTarget(this, ruleContext);
  }

  // No extra runfiles dependencies in Bazel.
  @Override
  public void addDependenciesForRunfiles(RuleContext ruleContext, Runfiles.Builder builder) {
  }

  @Override
  public boolean forceUseJavaLauncherTarget(RuleContext ruleContext) {
    return false;
  }

  // No extra artifacts to add in Bazel.
  @Override
  public void addArtifactToJavaTargetAttribute(JavaTargetAttributes.Builder builder,
      Artifact srcArtifact) {
  }

  // No common dependency processing in Bazel.
  @Override
  public void commonDependencyProcessing(RuleContext ruleContext,
      JavaTargetAttributes.Builder attributes,
      Collection<? extends TransitiveInfoCollection> deps) {
  }

  /**
   * Strips a Maven-style "src/.../resources/" prefix when present, otherwise
   * falls back to the standard java source-root heuristic.
   */
  @Override
  public PathFragment getDefaultJavaResourcePath(PathFragment path) {
    // Look for src/.../resources to match Maven repository structure.
    for (int i = 0; i < path.segmentCount() - 2; ++i) {
      if (path.getSegment(i).equals("src") && path.getSegment(i + 2).equals("resources")) {
        return path.subFragment(i + 3, path.segmentCount());
      }
    }
    PathFragment javaPath = JavaUtil.getJavaPath(path);
    return javaPath == null ? path : javaPath;
  }

  /**
   * Legacy java_test only: when no args are given, passes the fully qualified
   * class names of the sources as program arguments.
   */
  @Override
  public List<String> getExtraArguments(RuleContext ruleContext, ImmutableList<Artifact> sources) {
    if (ruleContext.getRule().getRuleClass().equals("java_test")) {
      if (useLegacyJavaTest(ruleContext)) {
        if (ruleContext.getConfiguration().getTestArguments().isEmpty()
            && !ruleContext.attributes().isAttributeValueExplicitlySpecified("args")) {
          ImmutableList.Builder<String> builder = ImmutableList.builder();
          for (Artifact artifact : sources) {
            PathFragment path = artifact.getRootRelativePath();
            String className = JavaUtil.getJavaFullClassname(FileSystemUtils.removeExtension(path));
            if (className != null) {
              builder.add(className);
            }
          }
          return builder.build();
        }
      }
    }
    return ImmutableList.<String>of();
  }

  // Legacy behavior applies when no explicit test_class is set and the
  // configuration opts into the legacy java_test semantics.
  private boolean useLegacyJavaTest(RuleContext ruleContext) {
    return !ruleContext.attributes().isAttributeValueExplicitlySpecified("test_class")
        && ruleContext.getFragment(JavaConfiguration.class).useLegacyBazelJavaTest();
  }

  @Override
  public String getJavaBuilderMainClass() {
    return JAVABUILDER_CLASS_NAME;
  }

  // No proto mapping artifact in Bazel.
  @Override
  public Artifact getProtoMapping(RuleContext ruleContext) throws InterruptedException {
    return null;
  }
}
| |
package org.ovirt.engine.api.restapi.resource;
import java.util.ArrayList;
import java.util.HashMap;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import org.junit.Test;
import org.ovirt.engine.api.model.Action;
import org.ovirt.engine.api.model.Cluster;
import org.ovirt.engine.api.model.CreationStatus;
import org.ovirt.engine.api.model.StorageDomain;
import org.ovirt.engine.api.model.Template;
import org.ovirt.engine.core.common.action.ImprotVmTemplateParameters;
import org.ovirt.engine.core.common.action.VdcActionParametersBase;
import org.ovirt.engine.core.common.action.VdcActionType;
import org.ovirt.engine.core.common.businessentities.AsyncTaskStatus;
import org.ovirt.engine.core.common.businessentities.AsyncTaskStatusEnum;
import org.ovirt.engine.core.common.businessentities.storage_domains;
import org.ovirt.engine.core.common.businessentities.StorageDomainType;
import org.ovirt.engine.core.common.businessentities.VDSGroup;
import org.ovirt.engine.core.common.businessentities.VmTemplate;
import org.ovirt.engine.core.common.interfaces.SearchType;
import org.ovirt.engine.core.common.queries.DiskImageList;
import org.ovirt.engine.core.common.queries.GetAllFromExportDomainQueryParamenters;
import org.ovirt.engine.core.common.queries.GetVmTemplateParameters;
import org.ovirt.engine.core.common.queries.StorageDomainQueryParametersBase;
import org.ovirt.engine.core.common.queries.VdcQueryType;
import org.ovirt.engine.core.compat.Guid;
import static org.ovirt.engine.api.restapi.resource.BackendStorageDomainTemplatesResourceTest.setUpStorageDomain;
import static org.ovirt.engine.api.restapi.resource.BackendStorageDomainTemplatesResourceTest.setUpStoragePool;
import static org.ovirt.engine.api.restapi.resource.BackendTemplatesResourceTest.setUpEntityExpectations;
import static org.ovirt.engine.api.restapi.resource.BackendTemplatesResourceTest.verifyModelSpecific;
/**
 * Unit tests for {@code BackendStorageDomainTemplateResource}: retrieving a template
 * that lives on an (export) storage domain, and the "import template" action in its
 * synchronous, asynchronous and validation-failure variants.
 *
 * <p>Uses the EasyMock-style expectation helpers inherited from
 * {@code AbstractBackendSubResourceTest}; expectation order and counts are significant.
 *
 * <p>NOTE(review): {@code ImprotVmTemplateParameters} and
 * {@code GetAllFromExportDomainQueryParamenters} are the (misspelled) upstream engine
 * class names, not typos introduced in this test.
 */
public class BackendStorageDomainTemplateResourceTest
    extends AbstractBackendSubResourceTest<Template,
                                           VmTemplate,
                                           BackendStorageDomainTemplateResource> {

    // Fixture ids drawn from the GUIDS pool of the abstract base test.
    private static final Guid TEMPLATE_ID = GUIDS[1];
    private static final Guid DATA_CENTER_ID = GUIDS[0];
    private static final Guid STORAGE_DOMAIN_ID = GUIDS[GUIDS.length-1];
    // Relative URI of the resource under test; used when verifying action responses.
    private static final String URL_BASE = "storagedomains/" + STORAGE_DOMAIN_ID + "/templates/" + TEMPLATE_ID;

    public BackendStorageDomainTemplateResourceTest() {
        super(new BackendStorageDomainTemplateResource(new BackendStorageDomainTemplatesResource(STORAGE_DOMAIN_ID),
                                                       TEMPLATE_ID.toString()));
    }

    // Also initializes the parent collection resource, which the sub-resource delegates to.
    protected void init() {
        super.init();
        initResource(resource.getParent());
    }

    // Propagates the UriInfo to the parent resource as well as the resource under test.
    protected void setUriInfo(UriInfo uriInfo) {
        super.setUriInfo(uriInfo);
        resource.getParent().setUriInfo(uriInfo);
    }

    // A malformed template id in the constructor must surface as a 404.
    @Test
    public void testBadGuid() throws Exception {
        control.replay();
        try {
            new BackendStorageDomainTemplateResource(null, "foo");
            fail("expected WebApplicationException");
        } catch (WebApplicationException wae) {
            verifyNotFoundException(wae);
        }
    }

    // GET against an export domain that does not contain the template -> 404.
    @Test
    public void testGetExportNotFound() throws Exception {
        setUpGetStorageDomainExpectations(StorageDomainType.ImportExport);
        setUpGetEntityExpectations(StorageDomainType.ImportExport, STORAGE_DOMAIN_ID, true);
        setUriInfo(setUpBasicUriExpectations());
        control.replay();
        try {
            resource.get();
            fail("expected WebApplicationException");
        } catch (WebApplicationException wae) {
            verifyNotFoundException(wae);
        }
    }

    @Test
    public void testGetExport() throws Exception {
        testGet(StorageDomainType.ImportExport);
    }

    // Shared happy-path GET: model index 1 corresponds to TEMPLATE_ID (GUIDS[1]).
    protected void testGet(StorageDomainType domainType) throws Exception {
        setUpGetStorageDomainExpectations(domainType);
        setUpGetEntityExpectations(domainType, STORAGE_DOMAIN_ID);
        setUriInfo(setUpBasicUriExpectations());
        control.replay();
        verifyModel(resource.get(), 1);
    }

    // Import when the template cannot be found on the source export domain -> 404.
    @Test
    public void testImportNotFound() throws Exception {
        Action action = new Action();
        action.setStorageDomain(new StorageDomain());
        action.getStorageDomain().setId(GUIDS[2].toString());
        action.setCluster(new Cluster());
        action.getCluster().setId(GUIDS[1].toString());
        setUpGetEntityExpectations(StorageDomainType.ImportExport, GUIDS[2], true);
        setUpGetDataCenterByStorageDomainExpectations(STORAGE_DOMAIN_ID);
        control.replay();
        try {
            resource.doImport(action);
            fail("expected WebApplicationException");
        } catch (WebApplicationException wae) {
            verifyNotFoundException(wae);
        }
    }

    // Import with destination domain and cluster referenced by id.
    @Test
    public void testImport() throws Exception {
        StorageDomain storageDomain = new StorageDomain();
        storageDomain.setId(GUIDS[2].toString());
        Cluster cluster = new Cluster();
        cluster.setId(GUIDS[1].toString());
        setUpGetDataCenterByStorageDomainExpectations(STORAGE_DOMAIN_ID);
        doTestImport(storageDomain, cluster);
    }

    // Import with the destination domain referenced by name (resolved via a search query).
    @Test
    public void testImportWithStorageDomainName() throws Exception {
        setUpGetEntityExpectations("Storage: name=" + NAMES[2],
                                   SearchType.StorageDomain,
                                   getStorageDomain(2));
        setUpGetDataCenterByStorageDomainExpectations(STORAGE_DOMAIN_ID);
        StorageDomain storageDomain = new StorageDomain();
        storageDomain.setName(NAMES[2]);
        Cluster cluster = new Cluster();
        cluster.setId(GUIDS[1].toString());
        doTestImport(storageDomain, cluster);
    }

    // Import with the cluster referenced by name (resolved via a search query).
    @Test
    public void testImportWithClusterName() throws Exception {
        setUpGetEntityExpectations("Cluster: name=" + NAMES[1],
                                   SearchType.Cluster,
                                   getCluster(1));
        StorageDomain storageDomain = new StorageDomain();
        storageDomain.setId(GUIDS[2].toString());
        Cluster cluster = new Cluster();
        cluster.setName(NAMES[1]);
        setUpGetDataCenterByStorageDomainExpectations(STORAGE_DOMAIN_ID);
        doTestImport(storageDomain, cluster);
    }

    // Expects the query that resolves the data center (storage pool) owning the given domain.
    private void setUpGetDataCenterByStorageDomainExpectations(Guid id) {
        setUpEntityQueryExpectations(VdcQueryType.GetStoragePoolsByStorageDomainId,
                                     StorageDomainQueryParametersBase.class,
                                     new String[] { "StorageDomainId" },
                                     new Object[] { id },
                                     setUpStoragePool());
    }

    // Shared import driver: wires up the ImportVmTemplate action expectations and runs doImport.
    public void doTestImport(StorageDomain storageDomain, Cluster cluster) throws Exception {
        setUpGetEntityExpectations(1, StorageDomainType.ImportExport, GUIDS[2]);
        setUriInfo(setUpActionExpectations(VdcActionType.ImportVmTemplate,
                                           ImprotVmTemplateParameters.class,
                                           new String[] { "ContainerId", "StorageDomainId", "SourceDomainId", "DestDomainId", "StoragePoolId", "VdsGroupId" },
                                           new Object[] { TEMPLATE_ID, GUIDS[2], STORAGE_DOMAIN_ID, GUIDS[2], DATA_CENTER_ID, GUIDS[1] }));
        Action action = new Action();
        action.setStorageDomain(storageDomain);
        action.setCluster(cluster);
        verifyActionResponse(resource.doImport(action));
    }

    @Test
    public void testImportAsyncPending() throws Exception {
        doTestImportAsync(AsyncTaskStatusEnum.init, CreationStatus.PENDING);
    }

    @Test
    public void testImportAsyncInProgress() throws Exception {
        doTestImportAsync(AsyncTaskStatusEnum.running, CreationStatus.IN_PROGRESS);
    }

    @Test
    public void testImportAsyncFinished() throws Exception {
        doTestImportAsync(AsyncTaskStatusEnum.finished, CreationStatus.COMPLETE);
    }

    // Async import driver: the backend reports an async task; the response entity must
    // carry a status matching the mapped CreationStatus.
    // NOTE(review): the data-center lookup here uses GUIDS[3] rather than
    // STORAGE_DOMAIN_ID as in the synchronous tests — presumably they coincide for the
    // fixture GUIDS length; confirm against AbstractBackendBaseTest.GUIDS.
    private void doTestImportAsync(AsyncTaskStatusEnum asyncStatus, CreationStatus actionStatus) throws Exception {
        setUpGetEntityExpectations(1, StorageDomainType.ImportExport, GUIDS[2]);
        setUpGetDataCenterByStorageDomainExpectations(GUIDS[3]);
        setUriInfo(setUpActionExpectations(
            VdcActionType.ImportVmTemplate,
            ImprotVmTemplateParameters.class,
            new String[] { "ContainerId", "StorageDomainId", "SourceDomainId", "DestDomainId", "StoragePoolId", "VdsGroupId" },
            new Object[] { TEMPLATE_ID, GUIDS[2], STORAGE_DOMAIN_ID, GUIDS[2], DATA_CENTER_ID, GUIDS[1] },
            asList(GUIDS[1]),
            asList(new AsyncTaskStatus(asyncStatus))));
        StorageDomain storageDomain = new StorageDomain();
        storageDomain.setId(GUIDS[2].toString());
        Cluster cluster = new Cluster();
        cluster.setId(GUIDS[1].toString());
        Action action = new Action();
        action.setStorageDomain(storageDomain);
        action.setCluster(cluster);
        Response response = resource.doImport(action);
        verifyActionResponse(response, URL_BASE, true, null, null);
        action = (Action)response.getEntity();
        assertTrue(action.isSetStatus());
        assertEquals(actionStatus.value(), action.getStatus().getState());
    }

    // Import without mandatory parameters -> "incomplete parameters" fault listing what is missing.
    @Test
    public void testIncompleteImport() throws Exception {
        setUriInfo(setUpBasicUriExpectations());
        try {
            control.replay();
            resource.doImport(new Action());
            fail("expected WebApplicationException on incomplete parameters");
        } catch (WebApplicationException wae) {
            verifyIncompleteException(wae, "Action", "doImport", "cluster.id|name", "storageDomain.id|name");
        }
    }

    // Expects the lookup of the storage domain itself (used by GET to learn its type).
    protected void setUpGetStorageDomainExpectations(StorageDomainType domainType) throws Exception {
        setUpEntityQueryExpectations(VdcQueryType.GetStorageDomainById,
                                     StorageDomainQueryParametersBase.class,
                                     new String[] { "StorageDomainId" },
                                     new Object[] { STORAGE_DOMAIN_ID },
                                     setUpStorageDomain(domainType));
    }

    // Registers the entity expectations the given number of times (queries are consumed per call).
    protected void setUpGetEntityExpectations(int times, StorageDomainType domainType, Guid getStoragePoolsByStorageDomainId) throws Exception {
        while (times-- > 0) {
            setUpGetEntityExpectations(domainType, getStoragePoolsByStorageDomainId);
        }
    }

    protected void setUpGetEntityExpectations(StorageDomainType domainType, Guid getStoragePoolsByStorageDomainId) throws Exception {
        setUpGetEntityExpectations(domainType, getStoragePoolsByStorageDomainId, false);
    }

    // Data domains resolve the template directly; import/export domains first resolve the
    // owning storage pool, then list the templates on the export domain.
    // When notFound is true the backend returns no matching entity.
    protected void setUpGetEntityExpectations(StorageDomainType domainType, Guid getStoragePoolsByStorageDomainId, boolean notFound) throws Exception {
        switch (domainType) {
        case Data:
            setUpEntityQueryExpectations(VdcQueryType.GetVmTemplate,
                                         GetVmTemplateParameters.class,
                                         new String[] { "Id" },
                                         new Object[] { TEMPLATE_ID },
                                         notFound ? null : getEntity(1));
            break;
        case ImportExport:
            setUpEntityQueryExpectations(VdcQueryType.GetStoragePoolsByStorageDomainId,
                                         StorageDomainQueryParametersBase.class,
                                         new String[] { "StorageDomainId" },
                                         new Object[] { getStoragePoolsByStorageDomainId },
                                         setUpStoragePool());
            setUpEntityQueryExpectations(VdcQueryType.GetTemplatesFromExportDomain,
                                         GetAllFromExportDomainQueryParamenters.class,
                                         new String[] { "StoragePoolId", "StorageDomainId" },
                                         new Object[] { DATA_CENTER_ID, STORAGE_DOMAIN_ID },
                                         setUpTemplates(notFound));
            break;
        default:
            break;
        }
    }

    // Synchronous action expectations (no async tasks).
    protected UriInfo setUpActionExpectations(VdcActionType task,
                                              Class<? extends VdcActionParametersBase> clz,
                                              String[] names,
                                              Object[] values) {
        return setUpActionExpectations(task, clz, names, values, true, true, null, null, true);
    }

    // Async action expectations: the action returns task ids whose statuses drive the response.
    protected UriInfo setUpActionExpectations(VdcActionType task,
                                              Class<? extends VdcActionParametersBase> clz,
                                              String[] names,
                                              Object[] values,
                                              ArrayList<Guid> asyncTasks,
                                              ArrayList<AsyncTaskStatus> asyncStatuses) {
        String uri = URL_BASE + "/action";
        return setUpActionExpectations(task, clz, names, values, true, true, null, asyncTasks, asyncStatuses, uri, true);
    }

    private void verifyActionResponse(Response r) throws Exception {
        verifyActionResponse(r, URL_BASE, false);
    }

    // Builds a mocked VmTemplate with the standard expectations for the given fixture index.
    @Override
    protected VmTemplate getEntity(int index) {
        return setUpEntityExpectations(control.createMock(VmTemplate.class), index);
    }

    // Result of listing templates on an export domain; empty map simulates "not found".
    protected HashMap<VmTemplate, DiskImageList> setUpTemplates(boolean notFound) {
        HashMap<VmTemplate, DiskImageList> ret = new HashMap<VmTemplate, DiskImageList>();
        if (notFound) {
            return ret;
        }
        for (int i = 0; i < NAMES.length; i++) {
            ret.put(getEntity(i), new DiskImageList());
        }
        return ret;
    }

    protected void verifyModel(Template model, int index) {
        super.verifyModel(model, index);
        verifyModelSpecific(model, index);
    }

    // Minimal business entity returned when resolving a storage domain by name.
    protected storage_domains getStorageDomain(int idx) {
        storage_domains dom = new storage_domains();
        dom.setId(GUIDS[idx]);
        return dom;
    }

    // Minimal business entity returned when resolving a cluster by name.
    protected VDSGroup getCluster(int idx) {
        VDSGroup cluster = new VDSGroup();
        cluster.setId(GUIDS[idx]);
        return cluster;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ode.axis2.httpbinding;
import junit.framework.TestCase;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.params.DefaultHttpParams;
import org.apache.commons.httpclient.methods.ByteArrayRequestEntity;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.httpclient.methods.StringRequestEntity;
import org.apache.ode.bpel.epr.MutableEndpoint;
import org.apache.ode.bpel.iapi.*;
import org.apache.ode.utils.DOMUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import javax.wsdl.Binding;
import javax.wsdl.Definition;
import javax.wsdl.Operation;
import javax.wsdl.Port;
import javax.wsdl.PortType;
import javax.wsdl.Service;
import javax.wsdl.extensions.http.HTTPAddress;
import javax.wsdl.factory.WSDLFactory;
import javax.wsdl.xml.WSDLReader;
import javax.xml.namespace.QName;
import java.net.URL;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Collections;
/**
* @author <a href="mailto:midon@intalio.com">Alexis Midon</a>
*/
/**
 * Tests for {@code HttpMethodConverter}: converting an ODE message exchange plus the
 * HTTP binding information from a WSDL into a concrete Commons-HttpClient request
 * (method name, URI, and request body).
 *
 * <p>The WSDL fixture {@code /http-method-builder.wsdl} defines two services:
 * "DeliciousService" (GET-style binding with URL replacement) and "DummyService"
 * (POST-style binding with an XML body).
 *
 * @author <a href="mailto:midon@intalio.com">Alexis Midon</a>
 */
public class HttpMethodConverterTest extends TestCase {
    protected Definition definition;
    // Converter + binding + port for the GET-style "DeliciousService".
    protected HttpMethodConverter deliciousBuilder;
    protected Binding deliciousBinding;
    protected Port deliciousPort;
    // Converter + binding + port for the POST-style "DummyService".
    protected HttpMethodConverter dummyBuilder;
    protected Port dummyPort;
    protected Binding dummyBinding;

    // Parses the WSDL fixture once per test and wires up both converters.
    protected void setUp() throws Exception {
        super.setUp();
        URL wsdlURL = getClass().getResource("/http-method-builder.wsdl");
        WSDLReader wsdlReader = WSDLFactory.newInstance().newWSDLReader();
        wsdlReader.setFeature("javax.wsdl.verbose", false);
        definition = wsdlReader.readWSDL(wsdlURL.toURI().toString());
        Service deliciousService = definition.getService(new QName("http://ode/bpel/unit-test.wsdl", "DeliciousService"));
        deliciousPort = deliciousService.getPort("TagHttpPort");
        deliciousBinding = deliciousPort.getBinding();
        deliciousBuilder = new HttpMethodConverter(definition, deliciousService.getQName(), deliciousPort.getName());
        Service dummyService = definition.getService(new QName("http://ode/bpel/unit-test.wsdl", "DummyService"));
        dummyPort = dummyService.getPort("DummyServiceHttpport");
        dummyBinding = dummyPort.getBinding();
        dummyBuilder = new HttpMethodConverter(definition, dummyService.getQName(), dummyPort.getName());
    }

    // A one-part message bound with URL replacement must yield a GET whose URI embeds the part value.
    public void testGetTag() throws Exception {
        String uri = ((HTTPAddress) deliciousPort.getExtensibilityElements().get(0)).getLocationURI();
        String expectedUri = uri + "/tag/java";
        Element msgEl;
        {
            // Build the ODE message DOM: <message><TagPart>java</TagPart></message>
            Document odeMsg = DOMUtils.newDocument();
            msgEl = odeMsg.createElementNS(null, "message");
            Element partEl = odeMsg.createElementNS(null, "TagPart");
            partEl.setTextContent("java");
            odeMsg.appendChild(msgEl);
            msgEl.appendChild(partEl);
        }
        MockMessageExchange odeMex = new MockMessageExchange();
        odeMex.op = deliciousBinding.getBindingOperation("getTag", null, null).getOperation();
        odeMex.req = new MockMessage(msgEl);
        odeMex.epr = new MockEPR(uri);
        HttpMethod httpMethod = deliciousBuilder.createHttpRequest(odeMex, new DefaultHttpParams());
        assertTrue("GET".equalsIgnoreCase(httpMethod.getName()));
        assertTrue(expectedUri.equalsIgnoreCase(httpMethod.getURI().toString()));
    }

    // A message with no parts cannot satisfy the URL-replacement binding -> IllegalArgumentException.
    public void testGetTagWithNoPart() throws Exception {
        String uri = ((HTTPAddress) deliciousPort.getExtensibilityElements().get(0)).getLocationURI();
        Element msgEl;
        {
            Document odeMsg = DOMUtils.newDocument();
            msgEl = odeMsg.createElementNS(null, "message");
            odeMsg.appendChild(msgEl);
        }
        MockMessageExchange odeMex = new MockMessageExchange();
        odeMex.op = deliciousBinding.getBindingOperation("getTag", null, null).getOperation();
        odeMex.req = new MockMessage(msgEl);
        odeMex.epr = new MockEPR(uri);
        try {
            HttpMethod httpMethod = deliciousBuilder.createHttpRequest(odeMex, new DefaultHttpParams());
            fail("IllegalArgumentException expected because message element is empty.");
        } catch (IllegalArgumentException e) {
            // expected behavior
        }
    }

    // A POST-bound operation must produce a POST whose body is the serialized part content.
    public void testHello() throws Exception {
        String uri = ((HTTPAddress) dummyPort.getExtensibilityElements().get(0)).getLocationURI();
        String expectedUri = uri + "/" + "DummyService/hello";
        Element msgEl, helloEl;
        {
            // <message><parameters><hello>...</hello></parameters></message>
            Document odeMsg = DOMUtils.newDocument();
            msgEl = odeMsg.createElementNS(null, "message");
            Element partEl = odeMsg.createElementNS(null, "parameters");
            odeMsg.appendChild(msgEl);
            msgEl.appendChild(partEl);
            helloEl = odeMsg.createElementNS(null, "hello");
            helloEl.setTextContent("This is a test. How is it going so far?");
            partEl.appendChild(helloEl);
        }
        MockMessageExchange odeMex = new MockMessageExchange();
        odeMex.op = dummyBinding.getBindingOperation("hello", null, null).getOperation();
        odeMex.req = new MockMessage(msgEl);
        odeMex.epr = new MockEPR(uri);
        HttpMethod httpMethod = dummyBuilder.createHttpRequest(odeMex, new DefaultHttpParams());
        assertTrue("POST".equalsIgnoreCase(httpMethod.getName()));
        assertEquals("Generated URI does not match", expectedUri, httpMethod.getURI().toString());
        String b = ((StringRequestEntity) ((PostMethod) httpMethod).getRequestEntity()).getContent();
        assertEquals("Invalid body in generated http query", DOMUtils.domToString(helloEl), b);
    }

    // Minimal EPR stub: only getUrl() is meaningful to the converter.
    class MockEPR implements EndpointReference, MutableEndpoint {
        String url;
        MockEPR(String url) {
            this.url = url;
        }
        public String getUrl() {
            return url;
        }
        // other useless methods
        public Document toXML() {
            return null;
        }
        public boolean accept(Node node) {
            return false;
        }
        public void fromMap(Map eprMap) {
        }
        public void set(Node node) {
        }
        public Map toMap() {
            return null;
        }
    }

    // Minimal Message stub: wraps the pre-built message DOM element.
    class MockMessage implements Message {
        Element elt;
        MockMessage(Element elt) {
            this.elt = elt;
        }
        public Element getMessage() {
            return elt;
        }
        /**
         * Get the header as an element.
         */
        public Element getHeader() {
            return null;
        }
        // other useless methods
        public Element getHeaderPart(String partName) {
            return null;
        }
        public Map<String, Node> getHeaderParts() {
            return Collections.EMPTY_MAP;
        }
        public Element getPart(String partName) {
            return null;
        }
        public List<String> getParts() {
            return null;
        }
        public QName getType() {
            return null;
        }
        public void setHeaderPart(String name, Element content) {
        }
        public void setHeaderPart(String name, String content) {
        }
        public void setMessage(Element msg) {
        }
        public void setPart(String partName, Element content) {
        }
    }

    // Minimal message-exchange stub: only operation, request and EPR are consulted by the converter.
    class MockMessageExchange implements PartnerRoleMessageExchange {
        Operation op;
        Message req;
        EndpointReference epr;
        public Operation getOperation() {
            return op;
        }
        public Message getRequest() {
            return req;
        }
        public EndpointReference getEndpointReference() throws BpelEngineException {
            return epr;
        }
        // other useless methods
        public QName getCaller() {
            return null;
        }
        public PartnerRoleChannel getChannel() {
            return null;
        }
        public EndpointReference getMyRoleEndpointReference() {
            return null;
        }
        public void reply(Message response) throws BpelEngineException {
        }
        public void replyAsync() {
        }
        public void replyOneWayOk() {
        }
        public void replyWithFailure(FailureType type, String description, Element details) throws BpelEngineException {
        }
        public void replyWithFault(QName faultType, Message outputFaultMessage) throws BpelEngineException {
        }
        public Message createMessage(QName msgType) {
            return null;
        }
        public QName getFault() {
            return null;
        }
        public String getFaultExplanation() {
            return null;
        }
        public Message getFaultResponse() {
            return null;
        }
        public String getMessageExchangeId() throws BpelEngineException {
            return null;
        }
        public MessageExchangePattern getMessageExchangePattern() {
            return null;
        }
        public String getOperationName() throws BpelEngineException {
            return null;
        }
        public PortType getPortType() {
            return null;
        }
        public String getProperty(String key) {
            return null;
        }
        public Set<String> getPropertyNames() {
            return null;
        }
        public Message getResponse() {
            return null;
        }
        public Status getStatus() {
            return null;
        }
        public boolean isTransactionPropagated() throws BpelEngineException {
            return false;
        }
        public void release() {
        }
        public void setProperty(String key, String value) {
        }
        public int getSubscriberCount() {
            return 0;
        }
        public void setSubscriberCount(int subscriberCount) {
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.couchbase;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.BUCKET_NAME;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.COUCHBASE_CLUSTER_SERVICE;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.DOCUMENT_TYPE;
import static org.apache.nifi.processors.couchbase.CouchbaseAttributes.Exception;
import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.DOC_ID;
import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_FAILURE;
import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_RETRY;
import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_SUCCESS;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import com.couchbase.client.deps.io.netty.buffer.Unpooled;
import com.couchbase.client.java.document.BinaryDocument;
import org.apache.nifi.attribute.expression.language.exception.AttributeExpressionLanguageException;
import org.apache.nifi.couchbase.CouchbaseClusterControllerService;
import org.apache.nifi.couchbase.DocumentType;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import com.couchbase.client.core.CouchbaseException;
import com.couchbase.client.core.ServiceNotAvailableException;
import com.couchbase.client.java.Bucket;
import com.couchbase.client.java.PersistTo;
import com.couchbase.client.java.ReplicateTo;
import com.couchbase.client.java.document.RawJsonDocument;
import com.couchbase.client.java.error.DurabilityException;
/**
 * Tests for the {@code PutCouchbaseKey} processor using the NiFi mock framework and a
 * Mockito-mocked Couchbase {@code Bucket}: document-id resolution (static, expression
 * language, FlowFile UUID fallback), binary vs JSON documents, durability constraints,
 * and routing on Couchbase errors (failure vs retry).
 */
public class TestPutCouchbaseKey {

    private static final String SERVICE_ID = "couchbaseClusterService";
    private TestRunner testRunner;

    @Before
    public void init() throws Exception {
        // Route processor logging through the simple logger at debug level for diagnostics.
        System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", "info");
        System.setProperty("org.slf4j.simpleLogger.showDateTime", "true");
        System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.processors.couchbase.PutCouchbaseKey", "debug");
        System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.processors.couchbase.TestPutCouchbaseKey", "debug");
        testRunner = TestRunners.newTestRunner(PutCouchbaseKey.class);
    }

    // Registers a mock cluster controller service that hands out the given mock bucket.
    private void setupMockBucket(Bucket bucket) throws InitializationException {
        CouchbaseClusterControllerService service = mock(CouchbaseClusterControllerService.class);
        when(service.getIdentifier()).thenReturn(SERVICE_ID);
        when(service.openBucket(anyString())).thenReturn(bucket);
        when(bucket.name()).thenReturn("bucket-1");
        testRunner.addControllerService(SERVICE_ID, service);
        testRunner.enableControllerService(service);
        testRunner.setProperty(COUCHBASE_CLUSTER_SERVICE, SERVICE_ID);
    }

    // A literal DOC_ID is used as-is; success output carries the Couchbase attributes (cas, expiry, ...).
    @Test
    public void testStaticDocId() throws Exception {
        String bucketName = "bucket-1";
        String docId = "doc-a";
        int expiry = 100;
        long cas = 200L;
        String inFileData = "{\"key\":\"value\"}";
        byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
        Bucket bucket = mock(Bucket.class);
        when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE)))
            .thenReturn(RawJsonDocument.create(docId, expiry, inFileData, cas));
        setupMockBucket(bucket);
        testRunner.enqueue(inFileDataBytes);
        testRunner.setProperty(BUCKET_NAME, bucketName);
        testRunner.setProperty(DOC_ID, docId);
        testRunner.run();
        verify(bucket, times(1)).upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE));
        testRunner.assertAllFlowFilesTransferred(REL_SUCCESS);
        testRunner.assertTransferCount(REL_SUCCESS, 1);
        testRunner.assertTransferCount(REL_RETRY, 0);
        testRunner.assertTransferCount(REL_FAILURE, 0);
        MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
        outFile.assertContentEquals(inFileData);
        outFile.assertAttributeEquals(CouchbaseAttributes.Cluster.key(), SERVICE_ID);
        outFile.assertAttributeEquals(CouchbaseAttributes.Bucket.key(), bucketName);
        outFile.assertAttributeEquals(CouchbaseAttributes.DocId.key(), docId);
        outFile.assertAttributeEquals(CouchbaseAttributes.Cas.key(), String.valueOf(cas));
        outFile.assertAttributeEquals(CouchbaseAttributes.Expiry.key(), String.valueOf(expiry));
    }

    // DOCUMENT_TYPE=Binary must upsert a BinaryDocument instead of RawJsonDocument.
    @Test
    public void testBinaryDoc() throws Exception {
        String bucketName = "bucket-1";
        String docId = "doc-a";
        int expiry = 100;
        long cas = 200L;
        String inFileData = "12345";
        byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
        Bucket bucket = mock(Bucket.class);
        when(bucket.upsert(any(BinaryDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE)))
            .thenReturn(BinaryDocument.create(docId, expiry, Unpooled.copiedBuffer(inFileData.getBytes(StandardCharsets.UTF_8)), cas));
        setupMockBucket(bucket);
        testRunner.enqueue(inFileDataBytes);
        testRunner.setProperty(BUCKET_NAME, bucketName);
        testRunner.setProperty(DOC_ID, docId);
        testRunner.setProperty(DOCUMENT_TYPE, DocumentType.Binary.name());
        testRunner.run();
        verify(bucket, times(1)).upsert(any(BinaryDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE));
        testRunner.assertAllFlowFilesTransferred(REL_SUCCESS);
        testRunner.assertTransferCount(REL_SUCCESS, 1);
        testRunner.assertTransferCount(REL_RETRY, 0);
        testRunner.assertTransferCount(REL_FAILURE, 0);
        MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
        outFile.assertContentEquals(inFileData);
        outFile.assertAttributeEquals(CouchbaseAttributes.Cluster.key(), SERVICE_ID);
        outFile.assertAttributeEquals(CouchbaseAttributes.Bucket.key(), bucketName);
        outFile.assertAttributeEquals(CouchbaseAttributes.DocId.key(), docId);
        outFile.assertAttributeEquals(CouchbaseAttributes.Cas.key(), String.valueOf(cas));
        outFile.assertAttributeEquals(CouchbaseAttributes.Expiry.key(), String.valueOf(expiry));
    }

    // PERSIST_TO / REPLICATE_TO processor properties must be forwarded to the upsert call.
    @Test
    public void testDurabilityConstraint() throws Exception {
        String docId = "doc-a";
        String inFileData = "{\"key\":\"value\"}";
        byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
        Bucket bucket = mock(Bucket.class);
        when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.MASTER), eq(ReplicateTo.ONE)))
            .thenReturn(RawJsonDocument.create(docId, inFileData));
        setupMockBucket(bucket);
        testRunner.enqueue(inFileDataBytes);
        testRunner.setProperty(DOC_ID, docId);
        testRunner.setProperty(PutCouchbaseKey.PERSIST_TO, PersistTo.MASTER.toString());
        testRunner.setProperty(PutCouchbaseKey.REPLICATE_TO, ReplicateTo.ONE.toString());
        testRunner.run();
        verify(bucket, times(1)).upsert(any(RawJsonDocument.class), eq(PersistTo.MASTER), eq(ReplicateTo.ONE));
        testRunner.assertAllFlowFilesTransferred(REL_SUCCESS);
        testRunner.assertTransferCount(REL_SUCCESS, 1);
        testRunner.assertTransferCount(REL_RETRY, 0);
        testRunner.assertTransferCount(REL_FAILURE, 0);
        MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
        outFile.assertContentEquals(inFileData);
    }

    // DOC_ID expression language is evaluated against FlowFile attributes.
    @Test
    public void testDocIdExp() throws Exception {
        String docIdExp = "${'someProperty'}";
        String somePropertyValue = "doc-p";
        String inFileData = "{\"key\":\"value\"}";
        byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
        Bucket bucket = mock(Bucket.class);
        when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE)))
            .thenReturn(RawJsonDocument.create(somePropertyValue, inFileData));
        setupMockBucket(bucket);
        testRunner.setProperty(DOC_ID, docIdExp);
        Map<String, String> properties = new HashMap<>();
        properties.put("someProperty", somePropertyValue);
        testRunner.enqueue(inFileDataBytes, properties);
        testRunner.run();
        // Capture the upserted document to assert the resolved id and passthrough content.
        ArgumentCaptor<RawJsonDocument> capture = ArgumentCaptor.forClass(RawJsonDocument.class);
        verify(bucket, times(1)).upsert(capture.capture(), eq(PersistTo.NONE), eq(ReplicateTo.NONE));
        assertEquals(somePropertyValue, capture.getValue().id());
        assertEquals(inFileData, capture.getValue().content());
        testRunner.assertTransferCount(REL_SUCCESS, 1);
        testRunner.assertTransferCount(REL_RETRY, 0);
        testRunner.assertTransferCount(REL_FAILURE, 0);
        MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
        outFile.assertContentEquals(inFileData);
    }

    // An invalid EL function fails at evaluation time; nothing reaches any relationship.
    @Test
    public void testInvalidDocIdExp() throws Exception {
        String docIdExp = "${invalid_function(someProperty)}";
        String somePropertyValue = "doc-p";
        String inFileData = "{\"key\":\"value\"}";
        byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
        Bucket bucket = mock(Bucket.class);
        when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE)))
            .thenReturn(RawJsonDocument.create(somePropertyValue, inFileData));
        setupMockBucket(bucket);
        testRunner.setProperty(DOC_ID, docIdExp);
        Map<String, String> properties = new HashMap<>();
        properties.put("someProperty", somePropertyValue);
        testRunner.enqueue(inFileDataBytes, properties);
        try {
            testRunner.run();
            fail("Exception should be thrown.");
        } catch (AssertionError e){
            // TestRunner wraps the processor failure; the root cause must be the EL exception.
            Assert.assertTrue(e.getCause().getClass().equals(AttributeExpressionLanguageException.class));
        }
        testRunner.assertTransferCount(REL_SUCCESS, 0);
        testRunner.assertTransferCount(REL_RETRY, 0);
        testRunner.assertTransferCount(REL_FAILURE, 0);
    }

    // With no DOC_ID set, the FlowFile's uuid attribute is used as the document id.
    @Test
    public void testInputFlowFileUuid() throws Exception {
        String uuid = "00029362-5106-40e8-b8a9-bf2cecfbc0d7";
        String inFileData = "{\"key\":\"value\"}";
        byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
        Bucket bucket = mock(Bucket.class);
        when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE)))
            .thenReturn(RawJsonDocument.create(uuid, inFileData));
        setupMockBucket(bucket);
        Map<String, String> properties = new HashMap<>();
        properties.put(CoreAttributes.UUID.key(), uuid);
        testRunner.enqueue(inFileDataBytes, properties);
        testRunner.run();
        ArgumentCaptor<RawJsonDocument> capture = ArgumentCaptor.forClass(RawJsonDocument.class);
        verify(bucket, times(1)).upsert(capture.capture(), eq(PersistTo.NONE), eq(ReplicateTo.NONE));
        assertEquals(inFileData, capture.getValue().content());
        testRunner.assertTransferCount(REL_SUCCESS, 1);
        testRunner.assertTransferCount(REL_RETRY, 0);
        testRunner.assertTransferCount(REL_FAILURE, 0);
        MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
        outFile.assertContentEquals(inFileData);
    }

    // A non-temporary Couchbase error surfaces as a ProcessException (session rolled back).
    @Test
    public void testCouchbaseFailure() throws Exception {
        String docId = "doc-a";
        String inFileData = "{\"key\":\"value\"}";
        byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
        Bucket bucket = mock(Bucket.class);
        when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.ONE)))
            .thenThrow(new ServiceNotAvailableException());
        setupMockBucket(bucket);
        testRunner.enqueue(inFileDataBytes);
        testRunner.setProperty(DOC_ID, docId);
        testRunner.setProperty(PutCouchbaseKey.REPLICATE_TO, ReplicateTo.ONE.toString());
        try {
            testRunner.run();
            fail("ProcessException should be thrown.");
        } catch (AssertionError e){
            Assert.assertTrue(e.getCause().getClass().equals(ProcessException.class));
        }
        // NOTE(review): asserting "all transferred to REL_FAILURE" together with a
        // REL_FAILURE count of 0 is only consistent if the session rolls back and
        // transfers nothing at all — confirm this is the intended expectation.
        verify(bucket, times(1)).upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.ONE));
        testRunner.assertAllFlowFilesTransferred(REL_FAILURE);
        testRunner.assertTransferCount(REL_SUCCESS, 0);
        testRunner.assertTransferCount(REL_RETRY, 0);
        testRunner.assertTransferCount(REL_FAILURE, 0);
    }

    // A temporary error (DurabilityException) routes the original FlowFile to retry
    // with the exception class recorded as an attribute.
    @Test
    public void testCouchbaseTempFlowFileError() throws Exception {
        String docId = "doc-a";
        String inFileData = "{\"key\":\"value\"}";
        byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
        Bucket bucket = mock(Bucket.class);
        CouchbaseException exception = new DurabilityException();
        when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.ONE)))
            .thenThrow(exception);
        setupMockBucket(bucket);
        testRunner.enqueue(inFileDataBytes);
        testRunner.setProperty(DOC_ID, docId);
        testRunner.setProperty(PutCouchbaseKey.REPLICATE_TO, ReplicateTo.ONE.toString());
        testRunner.run();
        verify(bucket, times(1)).upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.ONE));
        testRunner.assertTransferCount(REL_SUCCESS, 0);
        testRunner.assertTransferCount(REL_RETRY, 1);
        testRunner.assertTransferCount(REL_FAILURE, 0);
        MockFlowFile orgFile = testRunner.getFlowFilesForRelationship(REL_RETRY).get(0);
        orgFile.assertContentEquals(inFileData);
        orgFile.assertAttributeEquals(Exception.key(), exception.getClass().getName());
    }
}
| |
package mkl.testarea.pdfbox2.content;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.PDPageContentStream;
import org.apache.pdfbox.pdmodel.common.PDRectangle;
import org.apache.pdfbox.pdmodel.font.PDFont;
import org.apache.pdfbox.pdmodel.font.PDType1Font;
import org.apache.pdfbox.pdmodel.font.Standard14Fonts.FontName;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Tests demonstrating how to break a long string into multiple PDF text lines.
 *
 * @author mkl
 */
public class BreakLongString
{
    final static File RESULT_FOLDER = new File("target/test-outputs", "content");

    @BeforeClass
    public static void setUpBeforeClass() throws Exception
    {
        RESULT_FOLDER.mkdirs();
    }

    /**
     * Splits a single paragraph (text without newline characters) into lines whose
     * rendered width at the given font and size does not exceed {@code maxWidth}.
     * A single word wider than {@code maxWidth} is emitted on a line of its own.
     *
     * <p>This is the word-wrapping loop previously duplicated in all three tests.</p>
     *
     * @param text     the paragraph to break; must not contain '\n'
     * @param font     the font used to measure string widths
     * @param fontSize the font size in points
     * @param maxWidth the maximum line width in points
     * @return the resulting lines, in order
     * @throws IOException if the font fails to measure a string
     */
    private static List<String> breakIntoLines(String text, PDFont font, float fontSize, float maxWidth) throws IOException
    {
        List<String> lines = new ArrayList<String>();
        int lastSpace = -1;
        while (text.length() > 0)
        {
            int spaceIndex = text.indexOf(' ', lastSpace + 1);
            if (spaceIndex < 0)
                spaceIndex = text.length();
            String subString = text.substring(0, spaceIndex);
            // getStringWidth is in 1/1000 of a point at font size 1.
            float size = fontSize * font.getStringWidth(subString) / 1000;
            System.out.printf("'%s' - %f of %f\n", subString, size, maxWidth);
            if (size > maxWidth)
            {
                // Candidate is too wide: close the line at the previous space
                // (or at spaceIndex when the very first word is already too wide).
                if (lastSpace < 0)
                    lastSpace = spaceIndex;
                subString = text.substring(0, lastSpace);
                lines.add(subString);
                text = text.substring(lastSpace).trim();
                System.out.printf("'%s' is line\n", subString);
                lastSpace = -1;
            }
            else if (spaceIndex == text.length())
            {
                // Remainder fits on one line.
                lines.add(text);
                System.out.printf("'%s' is line\n", text);
                text = "";
            }
            else
            {
                lastSpace = spaceIndex;
            }
        }
        return lines;
    }

    /**
     * <a href="http://stackoverflow.com/questions/19635275/how-to-generate-multiple-lines-in-pdf-using-apache-pdfbox">
     * How to generate multiple lines in PDF using Apache pdfbox
     * </a>
     * <p>
     * This test shows how to split a long string into multiple lines.
     * </p>
     */
    @Test
    public void testBreakString() throws IOException
    {
        // try-with-resources: the original leaked the content stream (and the
        // document, partially) when an exception occurred before close().
        try (PDDocument doc = new PDDocument())
        {
            PDPage page = new PDPage();
            doc.addPage(page);
            PDFont pdfFont = new PDType1Font(FontName.HELVETICA);
            float fontSize = 25;
            float leading = 1.5f * fontSize;

            PDRectangle mediabox = page.getMediaBox();
            float margin = 72;
            float width = mediabox.getWidth() - 2 * margin;
            float startX = mediabox.getLowerLeftX() + margin;
            float startY = mediabox.getUpperRightY() - margin;

            String text = "I am trying to create a PDF file with a lot of text contents in the document. I am using PDFBox";
            List<String> lines = breakIntoLines(text, pdfFont, fontSize, width);

            try (PDPageContentStream contentStream = new PDPageContentStream(doc, page))
            {
                contentStream.beginText();
                contentStream.setFont(pdfFont, fontSize);
                contentStream.newLineAtOffset(startX, startY);
                for (String line : lines)
                {
                    contentStream.showText(line);
                    contentStream.newLineAtOffset(0, -leading);
                }
                contentStream.endText();
            }

            doc.save(new File(RESULT_FOLDER, "break-long-string.pdf"));
        }
    }

    /**
     * <a href="http://stackoverflow.com/questions/20680430/is-it-possible-to-justify-text-in-pdfbox">
     * Is it possible to justify text in PDFBOX?
     * </a>
     * <p>
     * This test shows how to split a long string into multiple lines with justification.
     * </p>
     */
    @Test
    public void testBreakStringJustified() throws IOException
    {
        try (PDDocument doc = new PDDocument())
        {
            PDPage page = new PDPage();
            doc.addPage(page);
            PDFont pdfFont = new PDType1Font(FontName.HELVETICA);
            float fontSize = 25;
            float leading = 1.5f * fontSize;

            PDRectangle mediabox = page.getMediaBox();
            float margin = 72;
            float width = mediabox.getWidth() - 2 * margin;
            float startX = mediabox.getLowerLeftX() + margin;
            // Shift the baseline down by the font ascent so the first line starts below the margin.
            float startY = mediabox.getUpperRightY() - margin - pdfFont.getBoundingBox().getUpperRightY() * fontSize / 1000;

            String text = "I am trying to create a PDF file with a lot of text contents in the document. I am using PDFBox";
            List<String> lines = breakIntoLines(text, pdfFont, fontSize, width);

            try (PDPageContentStream contentStream = new PDPageContentStream(doc, page))
            {
                contentStream.beginText();
                contentStream.setFont(pdfFont, fontSize);
                contentStream.newLineAtOffset(startX, startY);
                for (String line : lines)
                {
                    // Distribute the unused width across inter-character gaps to justify.
                    float charSpacing = 0;
                    if (line.length() > 1)
                    {
                        float size = fontSize * pdfFont.getStringWidth(line) / 1000;
                        float free = width - size;
                        if (free > 0)
                        {
                            charSpacing = free / (line.length() - 1);
                        }
                    }
                    contentStream.setCharacterSpacing(charSpacing);
                    contentStream.showText(line);
                    contentStream.newLineAtOffset(0, -leading);
                }
                contentStream.endText();
            }

            doc.save(new File(RESULT_FOLDER, "break-long-string-justified.pdf"));
        }
    }

    /**
     * <a href="http://stackoverflow.com/questions/19635275/how-to-generate-multiple-lines-in-pdf-using-apache-pdfbox">
     * How to generate multiple lines in PDF using Apache pdfbox
     * </a>
     * <p>
     * This test shows how to split a long string into multiple lines while additionally
     * honoring newline characters as unconditional line breaks.
     * </p>
     */
    @Test
    public void testBreakStringNL() throws IOException
    {
        try (PDDocument doc = new PDDocument())
        {
            PDPage page = new PDPage();
            doc.addPage(page);
            PDFont pdfFont = new PDType1Font(FontName.HELVETICA);
            float fontSize = 25;
            float leading = 1.5f * fontSize;

            PDRectangle mediabox = page.getMediaBox();
            float margin = 72;
            float width = mediabox.getWidth() - 2 * margin;
            float startX = mediabox.getLowerLeftX() + margin;
            float startY = mediabox.getUpperRightY() - margin;

            String textNL = "I am trying to create a PDF file with a lot of text contents in the document. I am using PDFBox.\nFurthermore, I have added some newline characters to the string at which lines also shall be broken.\nIt should work alright like this...";

            // Break each newline-delimited paragraph independently.
            List<String> lines = new ArrayList<String>();
            for (String text : textNL.split("\n"))
            {
                lines.addAll(breakIntoLines(text, pdfFont, fontSize, width));
            }

            try (PDPageContentStream contentStream = new PDPageContentStream(doc, page))
            {
                contentStream.beginText();
                contentStream.setFont(pdfFont, fontSize);
                contentStream.newLineAtOffset(startX, startY);
                for (String line : lines)
                {
                    contentStream.showText(line);
                    contentStream.newLineAtOffset(0, -leading);
                }
                contentStream.endText();
            }

            doc.save(new File(RESULT_FOLDER, "break-long-string-with-NL.pdf"));
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.tserver.log;
import static java.util.Collections.singletonList;
import java.io.IOException;
import java.nio.channels.ClosedChannelException;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.accumulo.core.client.Durability;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.dataImpl.KeyExtent;
import org.apache.accumulo.core.protobuf.ProtobufUtil;
import org.apache.accumulo.core.replication.ReplicationConfigurationUtil;
import org.apache.accumulo.core.util.SimpleThreadPool;
import org.apache.accumulo.fate.util.LoggingRunnable;
import org.apache.accumulo.fate.util.Retry;
import org.apache.accumulo.fate.util.Retry.RetryFactory;
import org.apache.accumulo.server.fs.VolumeManager;
import org.apache.accumulo.server.replication.StatusUtil;
import org.apache.accumulo.server.replication.proto.Replication.Status;
import org.apache.accumulo.server.util.Halt;
import org.apache.accumulo.server.util.ReplicationTableUtil;
import org.apache.accumulo.tserver.TabletMutations;
import org.apache.accumulo.tserver.TabletServer;
import org.apache.accumulo.tserver.log.DfsLogger.LoggerOperation;
import org.apache.accumulo.tserver.log.DfsLogger.ServerResources;
import org.apache.accumulo.tserver.tablet.CommitSession;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Central logging facility for the TServerInfo.
 *
 * Forwards in-memory updates to remote logs, carefully writing the same data to every log, while
 * maintaining the maximum thread parallelism for greater performance. As new logs are used and
 * minor compactions are performed, the metadata table is kept up-to-date.
 *
 */
public class TabletServerLogger {

  private static final Logger log = LoggerFactory.getLogger(TabletServerLogger.class);

  // Running estimate (in bytes) of data written to the current WAL; compared against
  // maxSize to decide when to roll the log. Reset to 0 whenever the log is closed.
  private final AtomicLong logSizeEstimate = new AtomicLong();

  // Roll the WAL when the size estimate exceeds this many bytes.
  private final long maxSize;
  // Roll the WAL when it is older than this many milliseconds.
  private final long maxAge;

  private final TabletServer tserver;

  // The current logger
  private DfsLogger currentLog = null;
  // Hand-off between the background log-maker thread and createLogger(): carries either a
  // ready-to-use DfsLogger or the Exception that prevented its creation.
  private final SynchronousQueue<Object> nextLog = new SynchronousQueue<>();
  // Single-threaded pool that pre-creates the next WAL in the background; lazily started.
  private ThreadPoolExecutor nextLogMaker;

  // The current generation of logs.
  // Because multiple threads can be using a log at one time, a log
  // failure is likely to affect multiple threads, who will all attempt to
  // create a new log. This will cause many unnecessary updates to the
  // metadata table.
  // We'll use this generational counter to determine if another thread has
  // already fetched a new log.
  private final AtomicInteger logId = new AtomicInteger();

  // Use a ReadWriteLock to allow multiple threads to use the log set, but obtain a write lock to
  // change them
  private final ReentrantReadWriteLock logIdLock = new ReentrantReadWriteLock();

  // Counters shared with DfsLogger instances to track sync/flush calls.
  private final AtomicLong syncCounter;
  private final AtomicLong flushCounter;
  // Wall-clock time (ms) at which the current WAL was created; compared against maxAge.
  private long createTime = 0;

  // Retry policy for WAL creation; createRetry is non-null only while creation is failing.
  private final RetryFactory createRetryFactory;
  private Retry createRetry = null;
  // Retry policy factory for WAL writes; a fresh Retry is created per logical write.
  private final RetryFactory writeRetryFactory;

  // A test/work pair for testLockAndRun(): test() is evaluated under the read lock, and
  // withWriteLock() runs under the write lock only if test() still holds after upgrading.
  private abstract static class TestCallWithWriteLock {
    abstract boolean test();

    abstract void withWriteLock() throws IOException;
  }

  /**
   * Pattern taken from the documentation for ReentrantReadWriteLock
   *
   * @param rwlock
   *          lock to use
   * @param code
   *          a test/work pair
   */
  private static void testLockAndRun(final ReadWriteLock rwlock, TestCallWithWriteLock code)
      throws IOException {
    // Get a read lock
    rwlock.readLock().lock();
    try {
      // does some condition exist that needs the write lock?
      if (code.test()) {
        // Yes, let go of the readLock
        rwlock.readLock().unlock();
        // Grab the write lock
        rwlock.writeLock().lock();
        try {
          // double-check the condition, since we let go of the lock
          if (code.test()) {
            // perform the work with write lock held
            code.withWriteLock();
          }
        } finally {
          // regain the readLock
          rwlock.readLock().lock();
          // unlock the write lock
          rwlock.writeLock().unlock();
        }
      }
    } finally {
      // always let go of the lock
      rwlock.readLock().unlock();
    }
  }

  /**
   * @param tserver owning tablet server, used for configuration and WAL bookkeeping
   * @param maxSize roll the WAL after approximately this many bytes
   * @param syncCounter shared counter of WAL sync operations
   * @param flushCounter shared counter of WAL flush operations
   * @param createRetryFactory retry policy for WAL creation failures
   * @param writeRetryFactory retry policy for WAL write failures
   * @param maxAge roll the WAL after this many milliseconds
   */
  public TabletServerLogger(TabletServer tserver, long maxSize, AtomicLong syncCounter,
      AtomicLong flushCounter, RetryFactory createRetryFactory, RetryFactory writeRetryFactory,
      long maxAge) {
    this.tserver = tserver;
    this.maxSize = maxSize;
    this.syncCounter = syncCounter;
    this.flushCounter = flushCounter;
    this.createRetryFactory = createRetryFactory;
    this.createRetry = null;
    this.writeRetryFactory = writeRetryFactory;
    this.maxAge = maxAge;
  }

  /**
   * Returns the current WAL, creating one (under the write lock) if none is set.
   * On return, logIdOut holds the generation that corresponds to the returned log,
   * or -1 if creation failed to set a current log.
   */
  private DfsLogger initializeLoggers(final AtomicInteger logIdOut) throws IOException {
    final AtomicReference<DfsLogger> result = new AtomicReference<>();
    testLockAndRun(logIdLock, new TestCallWithWriteLock() {
      @Override
      boolean test() {
        result.set(currentLog);
        if (currentLog != null)
          logIdOut.set(logId.get());
        return currentLog == null;
      }

      @Override
      void withWriteLock() {
        createLogger();
        result.set(currentLog);
        if (currentLog != null)
          logIdOut.set(logId.get());
        else
          logIdOut.set(-1);
      }
    });
    return result.get();
  }

  /**
   * Get the current WAL file
   *
   * @return The name of the current log, or null if there is no current log.
   */
  public String getLogFile() {
    logIdLock.readLock().lock();
    try {
      if (currentLog == null) {
        return null;
      }
      return currentLog.getFileName();
    } finally {
      logIdLock.readLock().unlock();
    }
  }

  /**
   * Installs the next pre-created WAL as the current log, bumping the log generation.
   * Must be called with the logIdLock write lock held and no current log set.
   * Halts the tablet server if creation fails more times than the retry policy allows.
   */
  private synchronized void createLogger() {
    if (!logIdLock.isWriteLockedByCurrentThread()) {
      throw new IllegalStateException("createLoggers should be called with write lock held!");
    }

    if (currentLog != null) {
      throw new IllegalStateException("createLoggers should not be called when current log is set");
    }

    try {
      startLogMaker();
      // Blocks until the background thread hands over a log or an exception.
      Object next = nextLog.take();
      if (next instanceof Exception) {
        throw (Exception) next;
      }
      if (next instanceof DfsLogger) {
        currentLog = (DfsLogger) next;
        logId.incrementAndGet();
        log.info("Using next log {}", currentLog.getFileName());

        // When we successfully create a WAL, make sure to reset the Retry.
        if (createRetry != null) {
          createRetry = null;
        }

        this.createTime = System.currentTimeMillis();
        return;
      } else {
        throw new RuntimeException("Error: unexpected type seen: " + next);
      }
    } catch (Exception t) {
      if (createRetry == null) {
        createRetry = createRetryFactory.createRetry();
      }

      // We have more retries or we exceeded the maximum number of accepted failures
      if (createRetry.canRetry()) {
        // Use the createRetry and record the time in which we did so
        createRetry.useRetry();

        try {
          // Backoff
          createRetry.waitForNextAttempt();
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
          throw new RuntimeException(e);
        }
      } else {
        log.error("Repeatedly failed to create WAL. Going to exit tabletserver.", t);
        // We didn't have retries or we failed too many times.
        Halt.halt("Experienced too many errors creating WALs, giving up", 1);
      }

      // The exception will trigger the log creation to be re-attempted.
      throw new RuntimeException(t);
    }
  }

  /**
   * Lazily starts the single background thread that pre-creates WALs and offers them
   * (or the exception that prevented creation) on the nextLog queue.
   */
  private synchronized void startLogMaker() {
    if (nextLogMaker != null) {
      return;
    }
    nextLogMaker = new SimpleThreadPool(1, "WALog creator");
    nextLogMaker.submit(new LoggingRunnable(log, new Runnable() {
      @Override
      public void run() {
        final ServerResources conf = tserver.getServerConfig();
        final VolumeManager fs = conf.getFileSystem();
        while (!nextLogMaker.isShutdown()) {
          log.debug("Creating next WAL");
          DfsLogger alog = null;

          try {
            alog = new DfsLogger(tserver.getContext(), conf, syncCounter, flushCounter);
            alog.open(tserver.getClientAddressString());
          } catch (Exception t) {
            log.error("Failed to open WAL", t);
            // the log is not advertised in ZK yet, so we can just delete it if it exists
            if (alog != null) {
              try {
                alog.close();
              } catch (Exception e) {
                log.error("Failed to close WAL after it failed to open", e);
              }

              try {
                Path path = alog.getPath();
                if (fs.exists(path)) {
                  fs.delete(path);
                }
              } catch (Exception e) {
                log.warn("Failed to delete a WAL that failed to open", e);
              }
            }

            // Hand the failure to the thread blocked in createLogger(); give up after 12h.
            try {
              nextLog.offer(t, 12, TimeUnit.HOURS);
            } catch (InterruptedException ex) {
              // ignore
            }

            continue;
          }

          String fileName = alog.getFileName();
          log.debug("Created next WAL {}", fileName);

          try {
            tserver.addNewLogMarker(alog);
          } catch (Exception t) {
            log.error("Failed to add new WAL marker for " + fileName, t);

            try {
              // Intentionally not deleting walog because it may have been advertised in ZK. See
              // #949
              alog.close();
            } catch (Exception e) {
              log.error("Failed to close WAL after it failed to open", e);
            }

            // it's possible the log was advertised in ZK even though we got an
            // exception. If there's a chance the WAL marker may have been created,
            // this will ensure it's closed. Either the close will be written and
            // the GC will clean it up, or the tserver is about to die due to session
            // expiration and the GC will also clean it up.
            try {
              tserver.walogClosed(alog);
            } catch (Exception e) {
              log.error("Failed to close WAL that failed to open: " + fileName, e);
            }

            try {
              nextLog.offer(t, 12, TimeUnit.HOURS);
            } catch (InterruptedException ex) {
              // ignore
            }

            continue;
          }

          // Park here until createLogger() takes the log; re-offer every 12 hours.
          try {
            while (!nextLog.offer(alog, 12, TimeUnit.HOURS)) {
              log.info("Our WAL was not used for 12 hours: {}", fileName);
            }
          } catch (InterruptedException e) {
            // ignore - server is shutting down
          }
        }
      }
    }));
  }

  /**
   * Closes the current WAL (if any), notifies the tablet server, and resets the size
   * estimate. Must be called with the logIdLock write lock held.
   */
  private synchronized void close() throws IOException {
    if (!logIdLock.isWriteLockedByCurrentThread()) {
      throw new IllegalStateException("close should be called with write lock held!");
    }

    try {
      if (currentLog != null) {
        try {
          currentLog.close();
        } catch (DfsLogger.LogClosedException ex) {
          // ignore
        } catch (Throwable ex) {
          log.error("Unable to cleanly close log " + currentLog.getFileName() + ": " + ex, ex);
        } finally {
          this.tserver.walogClosed(currentLog);
        }
        currentLog = null;
        logSizeEstimate.set(0);
      }
    } catch (Throwable t) {
      throw new IOException(t);
    }
  }

  // Callback that performs the actual write against a specific WAL.
  interface Writer {
    LoggerOperation write(DfsLogger logger) throws Exception;
  }

  /**
   * Core write loop: obtains the current WAL, records which tablets use it (recursing once
   * to write tablet definitions when a session first uses the log), performs the write, and
   * retries with a fresh log if the log set changed or the write failed. Afterwards rolls
   * the WAL if it has grown past maxSize or aged past maxAge.
   */
  private void write(final Collection<CommitSession> sessions, boolean mincFinish, Writer writer,
      Retry writeRetry) throws IOException {
    // Work very hard not to lock this during calls to the outside world
    int currentLogId = logId.get();

    boolean success = false;
    while (!success) {
      try {
        // get a reference to the loggers that no other thread can touch
        AtomicInteger currentId = new AtomicInteger(-1);
        DfsLogger copy = initializeLoggers(currentId);
        currentLogId = currentId.get();

        // add the logger to the log set for the memory in the tablet,
        // update the metadata table if we've never used this tablet
        if (currentLogId == logId.get()) {
          for (CommitSession commitSession : sessions) {
            if (commitSession.beginUpdatingLogsUsed(copy, mincFinish)) {
              try {
                // Scribble out a tablet definition and then write to the metadata table
                write(singletonList(commitSession), false,
                    logger -> logger.defineTablet(commitSession), writeRetry);
              } finally {
                commitSession.finishUpdatingLogsUsed();
              }

              // Need to release
              KeyExtent extent = commitSession.getExtent();
              if (ReplicationConfigurationUtil.isEnabled(extent,
                  tserver.getTableConfiguration(extent))) {
                Status status = StatusUtil.openWithUnknownLength(System.currentTimeMillis());
                log.debug("Writing " + ProtobufUtil.toString(status) + " to metadata table for "
                    + copy.getFileName());
                // Got some new WALs, note this in the metadata table
                ReplicationTableUtil.updateFiles(tserver.getContext(), commitSession.getExtent(),
                    copy.getFileName(), status);
              }
            }
          }
        }

        // Make sure that the logs haven't changed out from underneath our copy
        if (currentLogId == logId.get()) {

          // write the mutation to the logs
          LoggerOperation lop = writer.write(copy);
          lop.await();

          // double-check: did the log set change?
          success = (currentLogId == logId.get());
        }
      } catch (DfsLogger.LogClosedException | ClosedChannelException ex) {
        writeRetry.logRetry(log, "Logs closed while writing", ex);
      } catch (Exception t) {
        writeRetry.logRetry(log, "Failed to write to WAL", t);

        try {
          // Backoff
          writeRetry.waitForNextAttempt();
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
          throw new RuntimeException(e);
        }
      } finally {
        writeRetry.useRetry();
      }

      // Some sort of write failure occurred. Grab the write lock and reset the logs.
      // But since multiple threads will attempt it, only attempt the reset when
      // the logs haven't changed.
      final int finalCurrent = currentLogId;
      if (!success) {
        testLockAndRun(logIdLock, new TestCallWithWriteLock() {

          @Override
          boolean test() {
            return finalCurrent == logId.get();
          }

          @Override
          void withWriteLock() throws IOException {
            close();
          }
        });
      }
    }

    // if the log gets too big or too old, reset it .. grab the write lock first
    logSizeEstimate.addAndGet(4 * 3); // event, tid, seq overhead
    testLockAndRun(logIdLock, new TestCallWithWriteLock() {
      @Override
      boolean test() {
        return (logSizeEstimate.get() > maxSize)
            || ((System.currentTimeMillis() - createTime) > maxAge);
      }

      @Override
      void withWriteLock() throws IOException {
        close();
      }
    });
  }

  /**
   * Log a single mutation. This method expects mutations that have a durability other than NONE.
   */
  public void log(final CommitSession commitSession, final Mutation m, final Durability durability)
      throws IOException {
    if (durability == Durability.DEFAULT || durability == Durability.NONE) {
      throw new IllegalArgumentException("Unexpected durability " + durability);
    }

    write(singletonList(commitSession), false, logger -> logger.log(commitSession, m, durability),
        writeRetryFactory.createRetry());
    logSizeEstimate.addAndGet(m.numBytes());
  }

  /**
   * Log mutations. This method expects mutations that have a durability other than NONE.
   */
  public void logManyTablets(Map<CommitSession,TabletMutations> loggables) throws IOException {
    if (loggables.size() == 0)
      return;

    write(loggables.keySet(), false, logger -> logger.logManyTablets(loggables.values()),
        writeRetryFactory.createRetry());
    for (TabletMutations entry : loggables.values()) {
      if (entry.getMutations().size() < 1) {
        throw new IllegalArgumentException("logManyTablets: logging empty mutation list");
      }
      for (Mutation m : entry.getMutations()) {
        logSizeEstimate.addAndGet(m.numBytes());
      }
    }
  }

  // Records a finished minor compaction in the WAL (mincFinish = true).
  public void minorCompactionFinished(final CommitSession commitSession, final long walogSeq,
      final Durability durability) throws IOException {

    write(singletonList(commitSession), true,
        logger -> logger.minorCompactionFinished(walogSeq, commitSession.getLogId(), durability),
        writeRetryFactory.createRetry());
  }

  // Records the start of a minor compaction in the WAL; returns the sequence number passed in.
  public long minorCompactionStarted(final CommitSession commitSession, final long seq,
      final String fullyQualifiedFileName, final Durability durability) throws IOException {
    write(
        singletonList(commitSession), false, logger -> logger.minorCompactionStarted(seq,
            commitSession.getLogId(), fullyQualifiedFileName, durability),
        writeRetryFactory.createRetry());
    return seq;
  }

  // Replays sorted WALs for the given extent, feeding recovered mutations to mr.
  public void recover(VolumeManager fs, KeyExtent extent, List<Path> logs, Set<String> tabletFiles,
      MutationReceiver mr) throws IOException {
    try {
      SortedLogRecovery recovery = new SortedLogRecovery(fs);
      recovery.recover(extent, logs, tabletFiles, mr);
    } catch (Exception e) {
      throw new IOException(e);
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal;
import java.io.Serializable;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCompute;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.compute.ComputeJob;
import org.apache.ignite.compute.ComputeJobAdapter;
import org.apache.ignite.compute.ComputeJobResult;
import org.apache.ignite.compute.ComputeTaskAdapter;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.util.GridConcurrentHashSet;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.resources.IgniteInstanceResource;
import org.apache.ignite.resources.LoggerResource;
import org.apache.ignite.spi.collision.jobstealing.JobStealingCollisionSpi;
import org.apache.ignite.spi.failover.jobstealing.JobStealingFailoverSpi;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.testframework.junits.common.GridCommonTest;
import org.jetbrains.annotations.Nullable;
import org.junit.Test;
/**
* Multithreaded job stealing test.
*/
@GridCommonTest(group = "Kernal Self")
public class GridMultithreadedJobStealingSelfTest extends GridCommonAbstractTest {
/** */
private Ignite ignite;
/** */
private static volatile CountDownLatch jobExecutedLatch;
/** */
public GridMultithreadedJobStealingSelfTest() {
super(false /* don't start grid*/);
}
/** {@inheritDoc} */
@Override protected void beforeTest() throws Exception {
ignite = startGridsMultiThreaded(2);
}
/** {@inheritDoc} */
@Override protected void afterTest() throws Exception {
ignite = null;
stopAllGrids();
}
/**
* Test 2 jobs on 2 nodes.
*
* @throws Exception If test failed.
*/
@Test
public void testTwoJobsMultithreaded() throws Exception {
final AtomicReference<Exception> fail = new AtomicReference<>(null);
final AtomicInteger stolen = new AtomicInteger(0);
final AtomicInteger noneStolen = new AtomicInteger(0);
final GridConcurrentHashSet nodes = new GridConcurrentHashSet();
int threadsNum = 10;
GridTestUtils.runMultiThreaded(new Runnable() {
/** */
@Override public void run() {
try {
JobStealingResult res = ignite.compute().execute(new JobStealingTask(2), null);
info("Task result: " + res);
stolen.addAndGet(res.stolen);
noneStolen.addAndGet(res.nonStolen);
nodes.addAll(res.nodes);
}
catch (IgniteException e) {
log.error("Failed to execute task.", e);
fail.getAndSet(e);
}
}
}, threadsNum, "JobStealingThread");
for (Ignite g : G.allGrids())
info("Metrics [nodeId=" + g.cluster().localNode().id() +
", metrics=" + g.cluster().localNode().metrics() + ']');
assertNull("Test failed with exception: ",fail.get());
// Total jobs number is threadsNum * 2
assertEquals("Incorrect processed jobs number",threadsNum * 2, stolen.get() + noneStolen.get());
assertFalse( "No jobs were stolen.",stolen.get() == 0);
for (Ignite g : G.allGrids())
assertTrue("Node get no jobs.", nodes.contains(g.name()));
// Under these circumstances we should not have more than 2 jobs
// difference.
//(but muted to 4 due to very rare fails and low priority of fix)
assertTrue( "Stats [stolen=" + stolen + ", noneStolen=" + noneStolen + ']',
Math.abs(stolen.get() - noneStolen.get()) <= 4);
}
/**
* Test newly joined node can steal jobs.
*
* @throws Exception If test failed.
*/
@Test
public void testJoinedNodeCanStealJobs() throws Exception {
final AtomicReference<Exception> fail = new AtomicReference<>(null);
final AtomicInteger stolen = new AtomicInteger(0);
final AtomicInteger noneStolen = new AtomicInteger(0);
final GridConcurrentHashSet nodes = new GridConcurrentHashSet();
int threadsNum = 10;
final int jobsPerTask = 4;
jobExecutedLatch = new CountDownLatch(threadsNum);
final IgniteInternalFuture<Long> future = GridTestUtils.runMultiThreadedAsync(new Runnable() {
/** */
@Override public void run() {
try {
final IgniteCompute compute = ignite.compute().withAsync();
compute.execute(new JobStealingTask(jobsPerTask), null);
JobStealingResult res = (JobStealingResult)compute.future().get();
info("Task result: " + res);
stolen.addAndGet(res.stolen);
noneStolen.addAndGet(res.nonStolen);
nodes.addAll(res.nodes);
}
catch (IgniteException e) {
log.error("Failed to execute task.", e);
fail.getAndSet(e);
}
}
}, threadsNum, "JobStealingThread");
//Wait for first job begin execution.
jobExecutedLatch.await();
startGrid(2);
for (Ignite g : G.allGrids())
info("Metrics [nodeId=" + g.cluster().localNode().id() +
", metrics=" + g.cluster().localNode().metrics() + ']');
future.get();
assertNull("Test failed with exception: ",fail.get());
// Total jobs number is threadsNum * 3
assertEquals("Incorrect processed jobs number",threadsNum * jobsPerTask, stolen.get() + noneStolen.get());
assertFalse( "No jobs were stolen.",stolen.get() == 0);
for (Ignite g : G.allGrids())
assertTrue("Node get no jobs.", nodes.contains(g.name()));
assertTrue( "Stats [stolen=" + stolen + ", noneStolen=" + noneStolen + ']',
Math.abs(stolen.get() - 2 * noneStolen.get()) <= 6);
}
/** {@inheritDoc} */
@Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
JobStealingCollisionSpi colSpi = new JobStealingCollisionSpi();
// One job at a time.
colSpi.setActiveJobsThreshold(1);
colSpi.setWaitJobsThreshold(0);
JobStealingFailoverSpi failSpi = new JobStealingFailoverSpi();
// Verify defaults.
assert failSpi.getMaximumFailoverAttempts() == JobStealingFailoverSpi.DFLT_MAX_FAILOVER_ATTEMPTS;
cfg.setCollisionSpi(colSpi);
cfg.setFailoverSpi(failSpi);
return cfg;
}
/**
* Job stealing task.
*/
private static class JobStealingTask extends ComputeTaskAdapter<Object, JobStealingResult> {
/** Grid. */
@IgniteInstanceResource
private Ignite ignite;
/** Logger. */
@LoggerResource
private IgniteLogger log;
/** */
private int jobsToRun;
/** */
public JobStealingTask(int jobsToRun) {
this.jobsToRun = jobsToRun;
}
/** {@inheritDoc} */
@Override public Map<? extends ComputeJob, ClusterNode> map(List<ClusterNode> subgrid,
@Nullable Object arg) {
assert subgrid.size() == 2 : "Invalid subgrid size: " + subgrid.size();
Map<ComputeJobAdapter, ClusterNode> map = new HashMap<>(subgrid.size());
// Put all jobs onto local node.
for (int i = 0; i < jobsToRun; i++)
map.put(new GridJobStealingJob(3000L), ignite.cluster().localNode());
return map;
}
/** {@inheritDoc} */
@Override public JobStealingResult reduce(List<ComputeJobResult> results) {
int stolen = 0;
int nonStolen = 0;
Set<String> nodes = new HashSet<>(results.size());
for (ComputeJobResult res : results) {
String data = res.getData();
log.info("Job result: " + data);
nodes.add(data);
if (!data.equals(ignite.name()))
stolen++;
else
nonStolen++;
}
return new JobStealingResult(stolen, nonStolen, nodes);
}
}
/**
 * Sleeping job used to give the stealing SPIs time to act; returns the
 * name of the grid it actually executed on.
 */
private static final class GridJobStealingJob extends ComputeJobAdapter {
    /** Injected grid. */
    @IgniteInstanceResource
    private Ignite ignite;

    /**
     * @param arg Sleep duration in milliseconds.
     */
    GridJobStealingJob(Long arg) {
        super(arg);
    }

    /** {@inheritDoc} */
    @Override public Serializable execute() {
        try {
            // Signal that this job has started running.
            jobExecutedLatch.countDown();

            Long sleepMs = argument(0);

            assert sleepMs != null;

            Thread.sleep(sleepMs);
        }
        catch (InterruptedException e) {
            throw new IgniteException("Job got interrupted.", e);
        }

        // Report which node ran this job.
        return ignite.name();
    }
}
/**
 * Job stealing result: counts of stolen vs. locally executed jobs plus
 * the set of node names the jobs ran on.
 */
private static class JobStealingResult {
    /** Number of jobs executed on a node other than the mapping node. */
    int stolen;

    /** Number of jobs executed on the mapping node itself. */
    int nonStolen;

    /** Names of all nodes that executed at least one job (was a raw Set). */
    Set<String> nodes;

    /**
     * @param stolen Stolen jobs count.
     * @param nonStolen Non-stolen jobs count.
     * @param nodes Names of the nodes jobs executed on.
     */
    public JobStealingResult(int stolen, int nonStolen, Set<String> nodes) {
        this.stolen = stolen;
        this.nonStolen = nonStolen;
        this.nodes = nodes;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return "JobStealingResult{" +
            "stolen=" + stolen +
            ", nonStolen=" + nonStolen +
            ", nodes=" + Arrays.toString(nodes.toArray()) +
            '}';
    }
}
}
| |
/*
Copyright 2016 Mark Gunlogson
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.github.mgunlogson.cuckoofilter4j;
import static org.junit.Assert.*;
import java.util.HashSet;
import org.junit.Test;
import com.github.mgunlogson.cuckoofilter4j.FilterTable;
import com.google.common.testing.ClassSanityTester;
import com.google.common.testing.EqualsTester;
import com.google.common.testing.SerializableTester;
/**
 * Unit tests for FilterTable: tag read/write/delete/find/swap semantics,
 * bit isolation between slots and buckets, and the equality, copy,
 * null-handling and serialization contracts.
 */
public class TestFilterTable {

    @Test(expected = IllegalArgumentException.class)
    public void testInvalidArgs() {
        FilterTable.create(0, 100);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testInvalidArgs2() {
        FilterTable.create(5, 0);
    }

    // tag too short
    @Test(expected = IllegalArgumentException.class)
    public void testInvalidArgs3() {
        FilterTable.create(4, 100);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testTagTooBig() {
        FilterTable.create(60, 100);
    }

    @Test
    public void testSimpleReadWriteTag() {
        FilterTable tbl = FilterTable.create(12, 1000);
        int tag = 0b11111;
        for (int slot = 0; slot < 4; slot++) {
            // write the tag into this slot of every bucket...
            for (int idx = 0; idx < 1000; idx++) {
                tbl.writeTagNoClear(idx, slot, tag);
            }
            // ...then read each one back
            for (int idx = 0; idx < 1000; idx++) {
                assertEquals(tag, tbl.readTag(idx, slot));
            }
        }
    }

    @Test
    public void testSimpleDeleteTag() {
        FilterTable tbl = FilterTable.create(12, 1000);
        int tag = 0b11111;
        // fill every slot of every bucket, varying the tag per slot
        for (int slot = 0; slot < 4; slot++) {
            for (int idx = 0; idx < 1000; idx++) {
                tbl.writeTagNoClear(idx, slot, tag >>> slot);
            }
        }
        // delete everything that was just written
        for (int slot = 0; slot < 4; slot++) {
            for (int idx = 0; idx < 1000; idx++) {
                tbl.deleteFromBucket(idx, tag >>> slot);
            }
        }
        // the table should be completely empty again
        for (int slot = 0; slot < 4; slot++) {
            for (int idx = 0; idx < 1000; idx++) {
                assertEquals(0, tbl.readTag(idx, slot));
            }
        }
    }

    @Test
    public void testSimpleFindTag() {
        FilterTable tbl = FilterTable.create(12, 1000);
        int tag = 0b11111;
        tbl.writeTagNoClear(1, 2, tag);
        assertFalse(tbl.findTag(2, 3, tag));
        assertTrue(tbl.findTag(1, 3, tag));
        assertTrue(tbl.findTag(3, 1, tag));
        tbl.writeTagNoClear(2, 2, tag);
        assertTrue(tbl.findTag(1, 2, tag));
    }

    @Test
    public void testOverFillBucket() {
        int tag = 0b11111;
        FilterTable tbl = FilterTable.create(12, 1000);
        // a bucket holds exactly four tags; the fifth insert must fail
        for (int i = 0; i < 4; i++) {
            assertTrue(tbl.insertToBucket(5, tag));
        }
        assertFalse(tbl.insertToBucket(5, tag));
    }

    @Test
    public void testTagSwap() {
        int tag = 0b11111;
        FilterTable tbl = FilterTable.create(12, 1000);
        // fill the bucket so every victim candidate holds the same value
        for (int i = 0; i < 4; i++) {
            assertTrue(tbl.insertToBucket(5, tag));
        }
        assertFalse(tbl.insertToBucket(5, tag));
        // swapping 6 in must hand back the only possible victim value
        long swapped = tbl.swapRandomTagInBucket(5, 6);
        assertTrue("swapped tag is " + swapped + " expected " + tag, swapped == tag);
        assertTrue(tbl.findTag(5, 1, 6));
        assertTrue(tbl.findTag(1, 5, 6));
    }

    @Test
    public void testTagSwap2() {
        FilterTable tbl = FilterTable.create(12, 1000);
        assertTrue(tbl.insertToBucket(5, 1L));
        assertTrue(tbl.insertToBucket(5, 2L));
        assertTrue(tbl.insertToBucket(5, 3L));
        assertTrue(tbl.insertToBucket(5, 4L));
        // repeatedly swap a random victim; the value set {1..5} must survive
        long inHand = 5;
        for (int i = 0; i < 1000; i++) {
            inHand = tbl.swapRandomTagInBucket(5, inHand);
        }
        HashSet<Long> seen = new HashSet<>();
        seen.add(inHand);
        for (int slot = 0; slot < 4; slot++) {
            seen.add(tbl.readTag(5, slot));
        }
        assertEquals(5, seen.size());
        for (long v = 1; v <= 5; v++) {
            assertTrue(seen.contains(v));
        }
    }

    @Test
    public void testBitBleedWithinBucket() {
        int canary = 0b11111111111111111111111111111111;
        FilterTable tbl = FilterTable.create(12, 1000);
        // write all-ones into slots 0 and 2; neighboring slots must stay clear
        tbl.writeTagNoClear(5, 0, canary);
        tbl.writeTagNoClear(5, 2, canary);
        assertEquals(0, tbl.readTag(5, 1));
        assertEquals(0, tbl.readTag(5, 3));
    }

    @Test
    public void testDeleteCorrectBits() {
        int canary = 0b111111111111;
        FilterTable tbl = FilterTable.create(12, 1000);
        for (int slot = 0; slot < 4; slot++) {
            tbl.writeTagNoClear(5, slot, canary);
        }
        tbl.deleteTag(5, 1);
        tbl.deleteTag(5, 2);
        // deleted slots cleared, untouched slots preserved
        assertEquals(0, tbl.readTag(5, 1));
        assertEquals(0, tbl.readTag(5, 2));
        assertEquals(canary, tbl.readTag(5, 0));
        assertEquals(canary, tbl.readTag(5, 3));
    }

    @Test
    public void testBitBleedBetweenBuckets() {
        int canary = 0b11111111111111111111111111111111;
        FilterTable tbl = FilterTable.create(12, 1000);
        tbl.writeTagNoClear(5, 0, canary);
        tbl.writeTagNoClear(5, 3, canary);
        // positions directly adjacent in neighboring buckets must stay clear
        assertEquals(0, tbl.readTag(4, 3));
        assertEquals(0, tbl.readTag(6, 0));
    }

    @Test
    public void testEquals() {
        new EqualsTester()
                .addEqualityGroup(FilterTable.create(12, 1000))
                .addEqualityGroup(FilterTable.create(13, 1000))
                .addEqualityGroup(FilterTable.create(12, 2000))
                .testEquals();
    }

    @Test
    public void testCopy() {
        FilterTable original = FilterTable.create(12, 1000);
        FilterTable duplicate = original.copy();
        // equal in value but a distinct instance
        assertTrue(duplicate.equals(original));
        assertNotSame(original, duplicate);
    }

    @Test
    public void autoTestNulls() {
        // chose 15 for int so it passes checks
        new ClassSanityTester().setDefault(int.class, 15).testNulls(FilterTable.class);
    }

    @Test
    public void testSerialize() {
        SerializableTester.reserializeAndAssert(FilterTable.create(12, 1000));
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.webservices.builder;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.jar.JarFile;
import java.util.zip.ZipEntry;
import javax.wsdl.Definition;
import javax.wsdl.Import;
import javax.wsdl.Service;
import javax.wsdl.Types;
import javax.wsdl.WSDLException;
import javax.wsdl.extensions.ExtensibilityElement;
import javax.wsdl.extensions.ExtensionRegistry;
import javax.wsdl.extensions.UnknownExtensibilityElement;
import javax.wsdl.extensions.schema.Schema;
import javax.wsdl.factory.WSDLFactory;
import javax.wsdl.xml.WSDLLocator;
import javax.wsdl.xml.WSDLReader;
import javax.xml.namespace.QName;
import com.ibm.wsdl.extensions.PopulatedExtensionRegistry;
import com.ibm.wsdl.extensions.schema.SchemaConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.geronimo.webservices.WebServiceContainer;
import org.apache.geronimo.common.DeploymentException;
import org.apache.geronimo.xbeans.wsdl.DefinitionsDocument;
import org.apache.geronimo.xbeans.wsdl.TDefinitions;
import org.apache.geronimo.xbeans.wsdl.TPort;
import org.apache.geronimo.xbeans.wsdl.TService;
import org.apache.geronimo.deployment.xmlbeans.XmlBeansUtil;
import org.apache.xmlbeans.SchemaField;
import org.apache.xmlbeans.SchemaGlobalElement;
import org.apache.xmlbeans.SchemaParticle;
import org.apache.xmlbeans.SchemaType;
import org.apache.xmlbeans.SchemaTypeSystem;
import org.apache.xmlbeans.XmlBeans;
import org.apache.xmlbeans.XmlCursor;
import org.apache.xmlbeans.XmlError;
import org.apache.xmlbeans.XmlException;
import org.apache.xmlbeans.XmlObject;
import org.apache.xmlbeans.XmlOptions;
import org.apache.xmlbeans.impl.xb.xsdschema.SchemaDocument;
import org.w3c.dom.Element;
import org.xml.sax.EntityResolver;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
/**
* @version $Rev$ $Date$
*/
public class SchemaInfoBuilder {
private static final Logger log = LoggerFactory.getLogger(SchemaInfoBuilder.class);
/** Base type system (the soap 1.1 encoding schema) that every schema compilation links against; built once in the static initializer. */
private static final SchemaTypeSystem basicTypeSystem;
// private static final String[] errorNames = {"Error", "Warning", "Info"};
private static final String SOAP_NS = "http://schemas.xmlsoap.org/wsdl/soap/";
/** QName of the soap:address extensibility element inside a wsdl port. */
private static final QName ADDRESS_QNAME = new QName(SOAP_NS, "address");
/** QName of the unqualified location attribute on soap:address. */
private static final QName LOCATION_QNAME = new QName("", "location");
static {
// Load and compile the SOAP 1.1 encoding schema once; compileSchemaTypeSystem
// links every later compilation against this base type system.
InputStream is = WSDescriptorParser.class.getClassLoader().getResourceAsStream("META-INF/schema/soap_encoding_1_1.xsd");
if (is == null) {
throw new RuntimeException("Could not locate soap encoding schema");
}
ArrayList errors = new ArrayList();
XmlOptions xmlOptions = XmlBeansUtil.createXmlOptions(errors);
try {
SchemaDocument parsed = SchemaDocument.Factory.parse(is, xmlOptions);
if (errors.size() != 0) {
// Fix: was errors.toArray().toString(), which yields the array's
// identity hash (e.g. "[Ljava.lang.Object;@1a2b3c") instead of the
// collected parse errors.
throw new XmlException(errors.toString());
}
basicTypeSystem = XmlBeans.compileXsd(new XmlObject[]{parsed}, XmlBeans.getBuiltinTypeSystem(), xmlOptions);
if (errors.size() > 0) {
throw new RuntimeException("Could not compile schema type system: errors: " + errors);
}
} catch (XmlException e) {
throw new RuntimeException("Could not compile schema type system", e);
} catch (IOException e) {
throw new RuntimeException("Could not compile schema type system", e);
} finally {
try {
is.close();
} catch (IOException ignore) {
// close failure is non-fatal; the schema has already been read
}
}
}
/** Module archive the wsdl and schemas are read from. */
private final JarFile moduleFile;
/** Parsed wsdl definition this builder describes. */
private final Definition definition;
/** Stack of base URIs used to resolve relative imports during recursive reads. */
private final Stack uris = new Stack();
/** URI -> parsed wsdl/schema document, populated as documents are read. */
private final Map wsdlMap = new HashMap();
/** SchemaTypeKey (jaxrpc-style name + context) -> xmlbeans SchemaType. */
private final Map schemaTypeKeyToSchemaTypeMap;
/** Named complex type QName -> SchemaType. */
private final Map complexTypeMap;
/** Global element QName -> QName of its declared type. */
private final Map elementMap;
/** Named simple type QName -> SchemaType. */
private final Map simpleTypeMap;
/** Port QName -> javax.wsdl.Port. */
private final Map portMap;
/**
 * Builds schema info by reading and compiling the wsdl at wsdlUri from the module.
 *
 * @param moduleFile module archive containing the wsdl and schemas
 * @param wsdlUri location of the wsdl inside the module
 * @throws DeploymentException if the wsdl cannot be read or compiled
 */
public SchemaInfoBuilder(JarFile moduleFile, URI wsdlUri) throws DeploymentException {
this(moduleFile, wsdlUri, null, null);
}
/**
 * Builds schema info from an already-parsed wsdl definition.
 *
 * @param moduleFile module archive containing any imported schemas
 * @param definition parsed wsdl definition
 * @throws DeploymentException if the schema type system cannot be compiled
 */
public SchemaInfoBuilder(JarFile moduleFile, Definition definition) throws DeploymentException {
this(moduleFile, null, definition, null);
}
/**
 * Builds schema info from a pre-compiled schema type system.
 *
 * @param moduleFile module archive containing the wsdl
 * @param uri location of the wsdl inside the module
 * @param schemaTypeSystem previously compiled schema types
 * @throws DeploymentException if the wsdl cannot be processed
 */
SchemaInfoBuilder(JarFile moduleFile, URI uri, SchemaTypeSystem schemaTypeSystem) throws DeploymentException {
this(moduleFile, uri, null, schemaTypeSystem);
}
/**
 * Master constructor; the other constructors delegate here. At least one
 * of uri/definition must be supplied: with only a uri the wsdl is read
 * from the module, with only a definition the base uri is taken from the
 * definition's document base URI.
 *
 * @param moduleFile module archive the wsdl/schemas live in
 * @param uri location of the wsdl in the module, or null
 * @param definition pre-parsed wsdl definition, or null
 * @param schemaTypeSystem pre-compiled schema types, or null to compile here
 * @throws DeploymentException if neither uri nor definition is supplied, or
 *         reading/compiling the wsdl fails
 */
SchemaInfoBuilder(JarFile moduleFile, URI uri, Definition definition, SchemaTypeSystem schemaTypeSystem) throws DeploymentException {
this.moduleFile = moduleFile;
if (uri != null) {
// base uri for resolving relative schema/wsdl imports
uris.push(uri);
if (definition == null && schemaTypeSystem == null) {
definition = readWsdl(moduleFile, uri);
}
} else if (definition != null) {
try {
uri = new URI(definition.getDocumentBaseURI());
uris.push(uri);
} catch (URISyntaxException e) {
throw new DeploymentException("Could not locate definition", e);
}
} else {
throw new DeploymentException("You must supply uri or definition");
}
if (schemaTypeSystem == null) {
schemaTypeSystem = compileSchemaTypeSystem(definition);
}
this.definition = definition;
// pre-compute the lookup maps from the compiled type system
schemaTypeKeyToSchemaTypeMap = buildSchemaTypeKeyToSchemaTypeMap(schemaTypeSystem);
complexTypeMap = buildComplexTypeMap();
simpleTypeMap = buildSimpleTypeMap();
elementMap = buildElementMap();
portMap = buildPortMap();
}
/** @return map of SchemaTypeKey (jaxrpc-style name + context) to xmlbeans SchemaType */
public Map getSchemaTypeKeyToSchemaTypeMap() {
return schemaTypeKeyToSchemaTypeMap;
}
/** @return the parsed wsdl definition this builder describes */
public Definition getDefinition() {
return definition;
}
/** @return map of URI to parsed wsdl/schema document, as populated during reading */
public Map getWsdlMap() {
return wsdlMap;
}
/**
 * Find all the complex types in the previously constructed schema analysis.
 * Put them in a map from complex type QName to schema fragment.
 *
 * @return map of complexType QName to schema fragment
 */
public Map getComplexTypesInWsdl() {
return complexTypeMap;
}
/** Collects named (non-anonymous) complex types into a QName -> SchemaType map. */
private Map buildComplexTypeMap() {
    Map result = new HashMap();
    for (Iterator it = schemaTypeKeyToSchemaTypeMap.entrySet().iterator(); it.hasNext();) {
        Map.Entry entry = (Map.Entry) it.next();
        SchemaTypeKey key = (SchemaTypeKey) entry.getKey();
        // skip simple and anonymous types; only named complex types belong here
        if (key.isSimpleType() || key.isAnonymous()) {
            continue;
        }
        result.put(key.getqName(), (SchemaType) entry.getValue());
    }
    return result;
}
/** @return map of global element QName to the QName of its declared type */
public Map getElementToTypeMap() {
return elementMap;
}
/** Collects element keys into a map of element QName -> type QName. */
private Map buildElementMap() {
    Map elementToType = new HashMap();
    for (Iterator it = schemaTypeKeyToSchemaTypeMap.entrySet().iterator(); it.hasNext();) {
        Map.Entry entry = (Map.Entry) it.next();
        SchemaTypeKey key = (SchemaTypeKey) entry.getKey();
        if (!key.isElement()) {
            continue;
        }
        SchemaType schemaType = (SchemaType) entry.getValue();
        // map the element's QName to the QName of the type it declares
        elementToType.put(key.getqName(), schemaType.getName());
    }
    return elementToType;
}
/**
 * Gets a map of all the javax.wsdl.Port instance in the WSDL definition keyed by the port's QName
 * <p/>
 * WSDL 1.1 spec: 2.6 "The name attribute provides a unique name among all ports defined within in the enclosing WSDL document."
 *
 * @return Map of port QName to javax.wsdl.Port for that QName.
 */
public Map getPortMap() {
// populated once in the constructor via buildPortMap()
return portMap;
}
/** Flattens the ports of every service in the definition into one map. */
private Map buildPortMap() {
    HashMap ports = new HashMap();
    if (definition == null) {
        return ports;
    }
    // Port names are unique across the wsdl document (WSDL 1.1, 2.6),
    // so merging all services into a single map is safe.
    for (Iterator it = definition.getServices().values().iterator(); it.hasNext();) {
        Service svc = (Service) it.next();
        ports.putAll(svc.getPorts());
    }
    return ports;
}
/** @return map of named (non-anonymous) simple type QName to its xmlbeans SchemaType */
public Map getSimpleTypeMap() {
return simpleTypeMap;
}
/** Collects named (non-anonymous) simple types into a QName -> SchemaType map. */
private Map buildSimpleTypeMap() {
    Map result = new HashMap();
    for (Iterator it = schemaTypeKeyToSchemaTypeMap.entrySet().iterator(); it.hasNext();) {
        Map.Entry entry = (Map.Entry) it.next();
        SchemaTypeKey key = (SchemaTypeKey) entry.getKey();
        // only named simple types belong here
        if (!key.isSimpleType() || key.isAnonymous()) {
            continue;
        }
        result.put(key.getqName(), (SchemaType) entry.getValue());
    }
    return result;
}
/**
 * Compiles every schema referenced by the wsdl definition into a single
 * xmlbeans type system, linked against the soap-encoding base types.
 *
 * @param definition wsdl definition whose schemas should be compiled
 * @return the compiled schema type system
 * @throws DeploymentException if schemas cannot be collected or compiled
 */
public SchemaTypeSystem compileSchemaTypeSystem(Definition definition) throws DeploymentException {
    List schemaList = new ArrayList();
    addImportsFromDefinition(definition, schemaList);
    Collection errors = new ArrayList();
    XmlOptions xmlOptions = new XmlOptions();
    xmlOptions.setErrorListener(errors);
    // resolve schema imports against entries inside the module jar
    xmlOptions.setEntityResolver(new JarEntityResolver());
    XmlObject[] schemas = (XmlObject[]) schemaList.toArray(new XmlObject[schemaList.size()]);
    try {
        SchemaTypeSystem schemaTypeSystem = XmlBeans.compileXsd(schemas, basicTypeSystem, xmlOptions);
        // Log everything the compiler reported; only SEVERITY_ERROR is fatal,
        // warnings and infos are logged and tolerated.
        boolean sawError = false;
        for (Iterator it = errors.iterator(); it.hasNext();) {
            XmlError xmlError = (XmlError) it.next();
            int severity = xmlError.getSeverity();
            if (severity == XmlError.SEVERITY_ERROR) {
                log.error(xmlError.toString(), xmlError);
                sawError = true;
            } else if (severity == XmlError.SEVERITY_WARNING) {
                log.warn(xmlError.toString(), xmlError);
            } else if (severity == XmlError.SEVERITY_INFO) {
                log.debug(xmlError.toString(), xmlError);
            }
        }
        if (sawError) {
            throw new DeploymentException("Could not compile schema type system, see log for errors");
        }
        return schemaTypeSystem;
    } catch (XmlException e) {
        throw new DeploymentException("Could not compile schema type system: " + schemaList, e);
    }
}
/**
 * Recursively collects every inline schema from this definition and from
 * all wsdl imports into schemaList, for a single compilation pass.
 *
 * @param definition wsdl definition to scan
 * @param schemaList accumulator of parsed schema XmlObjects
 * @throws DeploymentException if a schema cannot be parsed or an import resolved
 */
private void addImportsFromDefinition(Definition definition, List schemaList) throws DeploymentException {
Map namespaceMap = definition.getNamespaces();
Types types = definition.getTypes();
if (types != null) {
List schemas = types.getExtensibilityElements();
for (Iterator iterator = schemas.iterator(); iterator.hasNext();) {
Object o = iterator.next();
if (o instanceof Schema) {
Schema unknownExtensibilityElement = (Schema) o;
QName elementType = unknownExtensibilityElement.getElementType();
// only genuine <xsd:schema> elements are collected
if (new QName("http://www.w3.org/2001/XMLSchema", "schema").equals(elementType)) {
Element element = unknownExtensibilityElement.getElement();
addSchemaElement(element, namespaceMap, schemaList);
}
} else if (o instanceof UnknownExtensibilityElement) {
//This is allegedly obsolete as of axis-wsdl4j-1.2-RC3.jar which includes the Schema extension above.
//The change notes imply that imported schemas should end up in Schema elements. They don't, so this is still needed.
UnknownExtensibilityElement unknownExtensibilityElement = (UnknownExtensibilityElement) o;
Element element = unknownExtensibilityElement.getElement();
String elementNamespace = element.getNamespaceURI();
String elementLocalName = element.getNodeName();
if ("http://www.w3.org/2001/XMLSchema".equals(elementNamespace) && "schema".equals(elementLocalName)) {
addSchemaElement(element, namespaceMap, schemaList);
}
}
}
}
Map imports = definition.getImports();
if (imports != null) {
for (Iterator iterator = imports.entrySet().iterator(); iterator.hasNext();) {
Map.Entry entry = (Map.Entry) iterator.next();
String namespaceURI = (String) entry.getKey();
List importList = (List) entry.getValue();
for (Iterator iterator1 = importList.iterator(); iterator1.hasNext();) {
Import anImport = (Import) iterator1.next();
//according to the 1.1 jwsdl mr schema imports are supposed to show up here,
//but according to the 1.0 spec there is supposed to be no Definition.
Definition definition1 = anImport.getDefinition();
if (definition1 != null) {
try {
URI uri = new URI(definition1.getDocumentBaseURI());
// push the imported document's base URI so relative references
// inside it resolve against the right location
uris.push(uri);
} catch (URISyntaxException e) {
throw new DeploymentException("Could not locate definition", e);
}
try {
addImportsFromDefinition(definition1, schemaList);
} finally {
// always pop, even on failure, to keep the uri stack balanced
uris.pop();
}
} else {
log.warn("Missing definition in import for namespace " + namespaceURI);
}
}
}
}
}
/**
 * Parses one schema DOM element (with the wsdl's namespace context applied)
 * and appends the result to schemaList.
 *
 * @param element schema element from the wsdl types section
 * @param namespaceMap prefix -> namespace declarations from the wsdl
 * @param schemaList accumulator of parsed schemas
 * @throws DeploymentException if the element cannot be parsed as a schema
 */
private void addSchemaElement(Element element, Map namespaceMap, List schemaList) throws DeploymentException {
    XmlObject parsedSchema;
    try {
        parsedSchema = parseWithNamespaces(element, namespaceMap);
    } catch (XmlException e) {
        throw new DeploymentException("Could not parse schema element", e);
    }
    schemaList.add(parsedSchema);
}
/**
 * Parses a schema DOM element and then copies the enclosing wsdl's
 * namespace declarations onto the parsed document so QName references
 * inside the schema resolve correctly.
 *
 * @param element schema element extracted from the wsdl types section
 * @param namespaceMap prefix -> namespace URI declarations from the wsdl
 * @return the parsed schema document with the namespaces applied
 * @throws XmlException if the element is not a valid schema document
 */
static XmlObject parseWithNamespaces(Element element, Map namespaceMap) throws XmlException {
ArrayList errors = new ArrayList();
XmlOptions xmlOptions = XmlBeansUtil.createXmlOptions(errors);
SchemaDocument parsed = SchemaDocument.Factory.parse(element, xmlOptions);
if (errors.size() != 0) {
// Fix: was errors.toArray().toString(), which reports the array's
// identity hash rather than the collected parse errors.
throw new XmlException(errors.toString());
}
XmlCursor cursor = parsed.newCursor();
try {
cursor.toFirstContentToken();
for (Iterator namespaces = namespaceMap.entrySet().iterator(); namespaces.hasNext();) {
Map.Entry entry = (Map.Entry) namespaces.next();
cursor.insertNamespace((String) entry.getKey(), (String) entry.getValue());
}
} finally {
// always release the cursor
cursor.dispose();
}
return parsed;
}
/**
 * builds a map of SchemaTypeKey containing jaxrpc-style fake QName and context info to xmlbeans SchemaType object.
 *
 * @param schemaTypeSystem compiled schema type system to index
 * @return Map of SchemaTypeKey to xmlbeans SchemaType object.
 */
private Map buildSchemaTypeKeyToSchemaTypeMap(SchemaTypeSystem schemaTypeSystem) {
    Map qnameMap = new HashMap();
    // named global types first...
    SchemaType[] globalTypes = schemaTypeSystem.globalTypes();
    for (int idx = 0; idx < globalTypes.length; idx++) {
        addSchemaType(globalTypes[idx].getName(), globalTypes[idx], false, qnameMap);
    }
    // ...then global elements (key == null marks them as top level)
    SchemaGlobalElement[] globalElements = schemaTypeSystem.globalElements();
    for (int idx = 0; idx < globalElements.length; idx++) {
        addElement(globalElements[idx], null, qnameMap);
    }
    return qnameMap;
}
/**
 * Registers an element (top-level or nested) in qnameMap under its
 * jaxrpc-style name, plus array forms and its anonymous type name.
 *
 * @param element element to register
 * @param key key of the enclosing type, or null for a top-level element
 * @param qnameMap accumulator of SchemaTypeKey -> SchemaType
 */
private void addElement(SchemaField element, SchemaTypeKey key, Map qnameMap) {
//TODO is this null if element is a ref?
QName elementName = element.getName();
String elementNamespace = elementName.getNamespaceURI();
//"" namespace means local element with elementFormDefault="unqualified"
if (elementNamespace == null || elementNamespace.equals("")) {
// Fix: key is null for top-level elements (handled below), so only
// borrow the enclosing type's namespace when there actually is one;
// previously this dereferenced key unconditionally and could NPE.
if (key != null) {
elementNamespace = key.getqName().getNamespaceURI();
}
}
String elementQNameLocalName;
SchemaTypeKey elementKey;
if (key == null) {
//top level. rule 2.a,
elementQNameLocalName = elementName.getLocalPart();
elementKey = new SchemaTypeKey(elementName, true, false, false, elementName);
} else {
//not top level. rule 2.b, key will be for enclosing Type.
QName enclosingTypeQName = key.getqName();
String enclosingTypeLocalName = enclosingTypeQName.getLocalPart();
elementQNameLocalName = enclosingTypeLocalName + ">" + elementName.getLocalPart();
QName subElementName = new QName(elementNamespace, elementQNameLocalName);
elementKey = new SchemaTypeKey(subElementName, true, false, true, elementName);
}
SchemaType schemaType = element.getType();
qnameMap.put(elementKey, schemaType);
//check if it's an array. maxOccurs is null if unbounded
//element should always be a SchemaParticle... this is a workaround for XMLBEANS-137
if (element instanceof SchemaParticle) {
addArrayForms((SchemaParticle) element, elementKey.getqName(), qnameMap, schemaType);
} else {
log.warn("element is not a schemaParticle! " + element);
}
//now, name for type. Rule 1.b, type inside an element
String typeQNameLocalPart = ">" + elementQNameLocalName;
QName typeQName = new QName(elementNamespace, typeQNameLocalPart);
boolean isAnonymous = true;
addSchemaType(typeQName, schemaType, isAnonymous, qnameMap);
}
/**
 * Registers a type under the given (possibly fake) QName and recurses into
 * its content model so nested local elements get names too.
 *
 * @param typeQName name (real or jaxrpc-style synthetic) for the type
 * @param schemaType type to register
 * @param anonymous whether the name is synthetic (anonymous type)
 * @param qnameMap accumulator of SchemaTypeKey -> SchemaType
 */
private void addSchemaType(QName typeQName, SchemaType schemaType, boolean anonymous, Map qnameMap) {
    SchemaTypeKey typeKey = new SchemaTypeKey(typeQName, false, schemaType.isSimpleType(), anonymous, null);
    qnameMap.put(typeKey, schemaType);
    //TODO xmlbeans recommends using summary info from getElementProperties and getAttributeProperties instead of traversing the content model by hand.
    SchemaParticle contentModel = schemaType.getContentModel();
    if (contentModel == null) {
        return;
    }
    addSchemaParticle(contentModel, typeKey, qnameMap);
}
/**
 * Recursively walks a content-model particle, naming local elements and
 * array forms per the jaxrpc naming rules.
 *
 * @param schemaParticle particle to walk
 * @param key key of the enclosing type, used to build nested names
 * @param qnameMap accumulator of SchemaTypeKey -> SchemaType
 */
private void addSchemaParticle(SchemaParticle schemaParticle, SchemaTypeKey key, Map qnameMap) {
if (schemaParticle.getParticleType() == SchemaParticle.ELEMENT) {
SchemaType elementType = schemaParticle.getType();
SchemaField element = elementType.getContainerField();
//element will be null if the type is defined elsewhere, such as a built in type.
if (element != null) {
addElement(element, key, qnameMap);
} else {
QName keyQName = key.getqName();
//TODO I can't distinguish between 3.a and 3.b, so generate names both ways.
//3.b
String localPart = schemaParticle.getName().getLocalPart();
QName elementName = new QName(keyQName.getNamespaceURI(), localPart);
addArrayForms(schemaParticle, elementName, qnameMap, elementType);
//3.a
localPart = keyQName.getLocalPart() + ">" + schemaParticle.getName().getLocalPart();
elementName = new QName(keyQName.getNamespaceURI(), localPart);
addArrayForms(schemaParticle, elementName, qnameMap, elementType);
}
} else {
// non-element particle (sequence/choice/all): recurse into children
try {
SchemaParticle[] children = schemaParticle.getParticleChildren();
for (int i = 0; i < children.length; i++) {
SchemaParticle child = children[i];
addSchemaParticle(child, key, qnameMap);
}
} catch (NullPointerException e) {
// deliberately swallowed: works around an xmlbeans NPE for particles
// with no children; nothing needs to be added in that case
}
}
}
/**
 * Registers jaxrpc array names ("name[min,max]" and, when min is 1, the
 * "name[,max]" alternate) for particles that can repeat (maxOccurs > 1).
 *
 * @param schemaParticle particle whose occurrence bounds are inspected
 * @param keyName base name the array names are derived from
 * @param qnameMap accumulator of SchemaTypeKey -> SchemaType
 * @param elementType type stored under the array keys
 */
private void addArrayForms(SchemaParticle schemaParticle, QName keyName, Map qnameMap, SchemaType elementType) {
    //it may be a ref or a built in type. If it's an array (maxOccurs >1) form a type for it.
    if (schemaParticle.getIntMaxOccurs() <= 1) {
        return;
    }
    // maxOccurs is null when unbounded
    String maxOccurs = schemaParticle.getMaxOccurs() == null ? "unbounded" : "" + schemaParticle.getIntMaxOccurs();
    int minOccurs = schemaParticle.getIntMinOccurs();
    QName elementName = schemaParticle.getName();
    String elementNamespace = elementName.getNamespaceURI();
    if (elementNamespace == null || elementNamespace.equals("")) {
        elementNamespace = keyName.getNamespaceURI();
    }
    // "name[min,max]" form
    String arrayLocalName = keyName.getLocalPart() + "[" + minOccurs + "," + maxOccurs + "]";
    QName arrayName = new QName(elementNamespace, arrayLocalName);
    //TODO not clear we want the schemaType as the value
    qnameMap.put(new SchemaTypeKey(arrayName, false, false, true, elementName), elementType);
    if (minOccurs == 1) {
        // "name[,max]" alternate form when the lower bound is one
        arrayLocalName = keyName.getLocalPart() + "[," + maxOccurs + "]";
        arrayName = new QName(elementNamespace, arrayLocalName);
        //TODO not clear we want the schemaType as the value
        qnameMap.put(new SchemaTypeKey(arrayName, false, false, true, elementName), elementType);
    }
}
/**
 * Reads the wsdl document at wsdlURI from the module, resolving relative
 * imports through the module jar.
 *
 * @param moduleFile module archive containing the wsdl
 * @param wsdlURI location of the wsdl inside the module
 * @return the parsed wsdl definition
 * @throws DeploymentException if the factory cannot be created or the wsdl read
 */
public Definition readWsdl(JarFile moduleFile, URI wsdlURI) throws DeploymentException {
Definition definition;
WSDLFactory wsdlFactory;
try {
wsdlFactory = WSDLFactory.newInstance();
} catch (WSDLException e) {
throw new DeploymentException("Could not create WSDLFactory", e);
}
WSDLReader wsdlReaderNoImport = wsdlFactory.newWSDLReader();
wsdlReaderNoImport.setFeature("javax.wsdl.importDocuments", false);
ExtensionRegistry extensionRegistry = new PopulatedExtensionRegistry();
// Register identical handling for all three xsd namespace generations
// (was three hand-copied blocks of the same three calls).
QName[] schemaQNames = {
SchemaConstants.Q_ELEM_XSD_1999,
SchemaConstants.Q_ELEM_XSD_2000,
SchemaConstants.Q_ELEM_XSD_2001
};
for (int i = 0; i < schemaQNames.length; i++) {
QName schemaQName = schemaQNames[i];
extensionRegistry.mapExtensionTypes(Types.class, schemaQName, UnknownExtensibilityElement.class);
extensionRegistry.registerDeserializer(Types.class, schemaQName, extensionRegistry.getDefaultDeserializer());
extensionRegistry.registerSerializer(Types.class, schemaQName, extensionRegistry.getDefaultSerializer());
}
wsdlReaderNoImport.setExtensionRegistry(extensionRegistry);
// NOTE(review): wsdlReaderNoImport is fully configured but never used;
// the read below goes through a plain reader that follows imports.
// Confirm whether the import-suppressing reader was meant to be used here.
JarWSDLLocator wsdlLocator = new JarWSDLLocator(wsdlURI);
WSDLReader wsdlReader = wsdlFactory.newWSDLReader();
// wsdl4j resolves classes via the context classloader; point it at ours
// for the duration of the read.
Thread thread = Thread.currentThread();
ClassLoader oldCl = thread.getContextClassLoader();
thread.setContextClassLoader(this.getClass().getClassLoader());
try {
try {
definition = wsdlReader.readWSDL(wsdlLocator);
} catch (WSDLException e) {
throw new DeploymentException("Failed to read wsdl document", e);
} catch (RuntimeException e) {
throw new DeploymentException(e.getMessage(), e);
}
} finally {
thread.setContextClassLoader(oldCl);
}
return definition;
}
/**
 * Returns the first extensibility element assignable to the given class.
 *
 * @param clazz required element class (or superclass/interface)
 * @param extensibilityElements list of ExtensibilityElement to search
 * @return the first matching element
 * @throws DeploymentException if no element of the requested class exists
 */
public static ExtensibilityElement getExtensibilityElement(Class clazz, List extensibilityElements) throws DeploymentException {
    for (Iterator it = extensibilityElements.iterator(); it.hasNext();) {
        ExtensibilityElement candidate = (ExtensibilityElement) it.next();
        if (clazz.isAssignableFrom(candidate.getClass())) {
            return candidate;
        }
    }
    throw new DeploymentException("No element of class " + clazz.getName() + " found");
}
/**
 * Rewrites the soap:address location of the named port to the replacement
 * token plus the servlet location, and returns the location used. When
 * servletLocation is null, the path of the port's original address is reused.
 *
 * @param portComponentName name of the wsdl port to patch
 * @param servletLocation new location path, or null to keep the original path
 * @return the servlet location actually written into the wsdl
 * @throws DeploymentException if the port is not found or its address is unparsable
 */
public String movePortLocation(String portComponentName, String servletLocation) throws DeploymentException {
// uris.get(0) is the top-level wsdl; its parsed document was stored in wsdlMap
DefinitionsDocument doc = (DefinitionsDocument) wsdlMap.get(uris.get(0));
TDefinitions definitions = doc.getDefinitions();
TService[] services = definitions.getServiceArray();
for (int i = 0; i < services.length; i++) {
TService service = services[i];
TPort[] ports = service.getPortArray();
for (int j = 0; j < ports.length; j++) {
TPort port = ports[j];
if (port.getName().trim().equals(portComponentName)) {
XmlCursor portCursor = port.newCursor();
try {
if (portCursor.toChild(ADDRESS_QNAME)) {
if (servletLocation == null) {
// no override supplied: reuse the path of the original address
String original = portCursor.getAttributeText(LOCATION_QNAME);
URI originalURI = new URI(original);
servletLocation = originalURI.getPath();
}
portCursor.setAttributeText(LOCATION_QNAME, WebServiceContainer.LOCATION_REPLACEMENT_TOKEN + servletLocation);
return servletLocation;
}
} catch (URISyntaxException e) {
throw new DeploymentException("Could not construct URI for ejb location in wsdl", e);
} finally {
// always release the cursor
portCursor.dispose();
}
}
}
}
throw new DeploymentException("No port found with name " + portComponentName + " expected at " + servletLocation);
}
/**
 * Resolves schema entity references against entries in the module jar,
 * relative to the current base URI, caching each parsed document in wsdlMap.
 */
private class JarEntityResolver implements EntityResolver {
private final static String PROJECT_URL_PREFIX = "project://local/";
public InputSource resolveEntity(String publicId, String systemId) throws SAXException, IOException {
//seems like this must be a bug in xmlbeans...
if (systemId.indexOf(PROJECT_URL_PREFIX) > -1) {
systemId = systemId.substring(PROJECT_URL_PREFIX.length());
}
URI location = ((URI) uris.peek()).resolve(systemId);
ZipEntry entry = moduleFile.getEntry(location.toString());
if (entry == null) {
// Fix: previously a missing entry fell through to a bare NPE inside
// getInputStream; fail with a descriptive IOException instead.
throw new IOException("Could not find schema entry " + location + " in module");
}
InputStream wsdlInputStream;
try {
wsdlInputStream = moduleFile.getInputStream(entry);
XmlObject xmlObject = SchemaDocument.Factory.parse(wsdlInputStream);
wsdlMap.put(location, xmlObject);
wsdlInputStream.close();
// reopen a fresh stream for the caller; the first was consumed by the parse
wsdlInputStream = moduleFile.getInputStream(entry);
} catch (XmlException e) {
throw (IOException) new IOException("Could not parse schema document").initCause(e);
}
return new InputSource(wsdlInputStream);
}
}
class JarWSDLLocator implements WSDLLocator {
/** All streams handed out to the wsdl reader; closed together in close(). */
private final List streams = new ArrayList();
/** Module-relative location of the top-level wsdl. */
private final URI wsdlURI;
/** Most recently resolved import location, exposed via getLatestImportURI(). */
private URI latestImportURI;
/**
 * @param wsdlURI module-relative location of the top-level wsdl
 */
public JarWSDLLocator(URI wsdlURI) {
this.wsdlURI = wsdlURI;
}
public InputSource getBaseInputSource() {
InputStream wsdlInputStream;
ZipEntry entry = moduleFile.getEntry(wsdlURI.toString());
if(entry == null){
throw new RuntimeException("The webservices.xml file points to a non-existant WSDL file "+wsdlURI.toString());
}
try {
wsdlInputStream = moduleFile.getInputStream(entry);
DefinitionsDocument definition = DefinitionsDocument.Factory.parse(wsdlInputStream);
wsdlMap.put(wsdlURI, definition);
wsdlInputStream.close();
wsdlInputStream = moduleFile.getInputStream(entry);
streams.add(wsdlInputStream);
} catch (Exception e) {
throw new RuntimeException("Could not open stream to wsdl file", e);
}
return new InputSource(wsdlInputStream);
}
public String getBaseURI() {
return wsdlURI.toString();
}
public InputSource getImportInputSource(String parentLocation, String relativeLocation) {
URI parentURI = URI.create(parentLocation);
latestImportURI = parentURI.resolve(relativeLocation);
InputStream importInputStream;
try {
ZipEntry entry = moduleFile.getEntry(latestImportURI.toString());
importInputStream = moduleFile.getInputStream(entry);
try {
DefinitionsDocument definition = DefinitionsDocument.Factory.parse(importInputStream);
importInputStream.close();
wsdlMap.put(latestImportURI, definition);
importInputStream.close();
} catch (XmlException e) {
//probably was a schema rather than wsdl. If there are real problems they will show up later.
}
importInputStream = moduleFile.getInputStream(entry);
streams.add(importInputStream);
} catch (Exception e) {
throw new RuntimeException("Could not open stream to import file", e);
}
InputSource inputSource = new InputSource(importInputStream);
inputSource.setSystemId(getLatestImportURI());
return inputSource;
}
public String getLatestImportURI() {
return latestImportURI.toString();
}
public void close() {
for (Iterator iterator = streams.iterator(); iterator.hasNext();) {
InputStream inputStream = (InputStream) iterator.next();
try {
inputStream.close();
} catch (IOException e) {
//ignore
}
}
streams.clear();
}
}
}
| |
/**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.internal.operators.observable;
import java.util.concurrent.atomic.*;
import io.reactivex.*;
import io.reactivex.disposables.Disposable;
import io.reactivex.exceptions.Exceptions;
import io.reactivex.functions.Consumer;
import io.reactivex.internal.disposables.DisposableHelper;
import io.reactivex.internal.fuseable.HasUpstreamObservableSource;
import io.reactivex.internal.util.ExceptionHelper;
import io.reactivex.observables.ConnectableObservable;
import io.reactivex.plugins.RxJavaPlugins;
/**
* A connectable observable which shares an underlying source and dispatches source values to observers in a backpressure-aware
* manner.
* @param <T> the value type
*/
public final class ObservablePublish<T> extends ConnectableObservable<T> implements HasUpstreamObservableSource<T> {
    /** The source observable. */
    final ObservableSource<T> source;
    /** Holds the current subscriber that is, will be or just was subscribed to the source observable. */
    final AtomicReference<PublishObserver<T>> current;
    /** The ObservableSource that manages child subscriptions against the shared connection. */
    final ObservableSource<T> onSubscribe;
    /**
     * Creates a OperatorPublish instance to publish values of the given source observable.
     * @param <T> the source value type
     * @param source the source observable
     * @return the connectable observable
     */
    public static <T> ConnectableObservable<T> create(ObservableSource<T> source) {
        // the current connection to source needs to be shared between the operator and its onSubscribe call
        final AtomicReference<PublishObserver<T>> curr = new AtomicReference<PublishObserver<T>>();
        ObservableSource<T> onSubscribe = new ObservableSource<T>() {
            @Override
            public void subscribe(Observer<? super T> child) {
                // create the backpressure-managing producer for this child
                InnerDisposable<T> inner = new InnerDisposable<T>(child);
                child.onSubscribe(inner);
                // concurrent connection/disconnection may change the state,
                // we loop to be atomic while the child subscribes
                for (;;) {
                    // get the current subscriber-to-source
                    PublishObserver<T> r = curr.get();
                    // if there isn't one or it is disposed
                    if (r == null || r.isDisposed()) {
                        // create a new subscriber to source
                        PublishObserver<T> u = new PublishObserver<T>(curr);
                        // let's try setting it as the current subscriber-to-source
                        if (!curr.compareAndSet(r, u)) {
                            // didn't work, maybe someone else did it or the current subscriber
                            // to source has just finished
                            continue;
                        }
                        // we won, let's use it going onwards
                        r = u;
                    }
                    /*
                     * Try adding it to the current subscriber-to-source, add is atomic in respect
                     * to other adds and the termination of the subscriber-to-source.
                     */
                    if (r.add(inner)) {
                        inner.setParent(r);
                        break; // NOPMD
                    }
                    /*
                     * The current PublishObserver has been terminated, try with a newer one.
                     */
                    /*
                     * Note: although technically correct, concurrent disconnects can cause
                     * unexpected behavior such as child observers never receiving anything
                     * (unless connected again). An alternative approach, similar to
                     * PublishSubject would be to immediately terminate such child
                     * observers as well:
                     *
                     * Object term = r.terminalEvent;
                     * if (r.nl.isCompleted(term)) {
                     *     child.onComplete();
                     * } else {
                     *     child.onError(r.nl.getError(term));
                     * }
                     * return;
                     *
                     * The original concurrent behavior was non-deterministic in this regard as well.
                     * Allowing this behavior, however, may introduce another unexpected behavior:
                     * after disconnecting a previous connection, one might not be able to prepare
                     * a new connection right after a previous termination by subscribing new child
                     * observers asynchronously before a connect call.
                     */
                }
            }
        };
        return RxJavaPlugins.onAssembly(new ObservablePublish<T>(onSubscribe, source, curr));
    }
    /**
     * Constructs the publish instance; use {@link #create(ObservableSource)} instead.
     * @param onSubscribe the source handling child subscriptions
     * @param source the upstream source observable
     * @param current holds the current connection's PublishObserver
     */
    private ObservablePublish(ObservableSource<T> onSubscribe, ObservableSource<T> source,
            final AtomicReference<PublishObserver<T>> current) {
        this.onSubscribe = onSubscribe;
        this.source = source;
        this.current = current;
    }
    /** Returns the upstream ObservableSource this operator publishes. */
    @Override
    public ObservableSource<T> source() {
        return source;
    }
    /** Routes the child observer through the connection-aware onSubscribe source. */
    @Override
    protected void subscribeActual(Observer<? super T> observer) {
        onSubscribe.subscribe(observer);
    }
    /**
     * Connects (at most once per connection generation) to the upstream source,
     * handing the connection's Disposable to the given callback first.
     * @param connection receives the Disposable representing this connection
     */
    @Override
    public void connect(Consumer<? super Disposable> connection) {
        boolean doConnect;
        PublishObserver<T> ps;
        // we loop because concurrent connect/disconnect and termination may change the state
        for (;;) {
            // retrieve the current subscriber-to-source instance
            ps = current.get();
            // if there is none yet or the current has been disposed
            if (ps == null || ps.isDisposed()) {
                // create a new subscriber-to-source
                PublishObserver<T> u = new PublishObserver<T>(current);
                // try setting it as the current subscriber-to-source
                if (!current.compareAndSet(ps, u)) {
                    // did not work, perhaps a new subscriber arrived
                    // and created a new subscriber-to-source as well, retry
                    continue;
                }
                ps = u;
            }
            // if connect() was called concurrently, only one of them should actually
            // connect to the source
            doConnect = !ps.shouldConnect.get() && ps.shouldConnect.compareAndSet(false, true);
            break; // NOPMD
        }
        /*
         * Notify the callback that we have a (new) connection which it can dispose
         * but since ps is unique to a connection, multiple calls to connect() will return the
         * same Disposable and even if there was a connect-disconnect-connect pair, the older
         * references won't disconnect the newer connection.
         * Synchronous source consumers have the opportunity to disconnect via dispose on the
         * Disposable as subscribe() may never return in its own.
         *
         * Note however, that asynchronously disconnecting a running source might leave
         * child observers without any terminal event; PublishSubject does not have this
         * issue because the dispose() was always triggered by the child observers
         * themselves.
         */
        try {
            connection.accept(ps);
        } catch (Throwable ex) {
            Exceptions.throwIfFatal(ex);
            throw ExceptionHelper.wrapOrThrow(ex);
        }
        if (doConnect) {
            source.subscribe(ps);
        }
    }
    /**
     * The observer subscribed to the upstream source; fans incoming events out
     * to the current set of InnerDisposables via a lock-free copy-on-write array.
     */
    @SuppressWarnings("rawtypes")
    static final class PublishObserver<T>
    implements Observer<T>, Disposable {
        /** Holds onto the current connected PublishObserver. */
        final AtomicReference<PublishObserver<T>> current;
        /** Indicates an empty array of inner observers. */
        static final InnerDisposable[] EMPTY = new InnerDisposable[0];
        /** Indicates a terminated PublishObserver. */
        static final InnerDisposable[] TERMINATED = new InnerDisposable[0];
        /** Tracks the subscribed observers. */
        final AtomicReference<InnerDisposable<T>[]> observers;
        /**
         * Atomically changed from false to true by connect to make sure the
         * connection is only performed by one thread.
         */
        final AtomicBoolean shouldConnect;
        /** Holds the upstream Disposable so the connection can be severed on dispose. */
        final AtomicReference<Disposable> s = new AtomicReference<Disposable>();
        @SuppressWarnings("unchecked")
        PublishObserver(AtomicReference<PublishObserver<T>> current) {
            this.observers = new AtomicReference<InnerDisposable<T>[]>(EMPTY);
            this.current = current;
            this.shouldConnect = new AtomicBoolean();
        }
        /**
         * Terminates this connection: swaps in TERMINATED, clears the shared
         * current reference and disposes the upstream Disposable exactly once.
         */
        @SuppressWarnings("unchecked")
        @Override
        public void dispose() {
            if (observers.get() != TERMINATED) {
                InnerDisposable[] ps = observers.getAndSet(TERMINATED);
                if (ps != TERMINATED) {
                    current.compareAndSet(PublishObserver.this, null);
                    DisposableHelper.dispose(s);
                }
            }
        }
        @Override
        public boolean isDisposed() {
            return observers.get() == TERMINATED;
        }
        @Override
        public void onSubscribe(Disposable s) {
            DisposableHelper.setOnce(this.s, s);
        }
        /** Dispatches the upstream value to every currently subscribed child. */
        @Override
        public void onNext(T t) {
            for (InnerDisposable<T> inner : observers.get()) {
                inner.child.onNext(t);
            }
        }
        /** Terminates all children with the error; reports it globally if none remain. */
        @SuppressWarnings("unchecked")
        @Override
        public void onError(Throwable e) {
            current.compareAndSet(this, null);
            InnerDisposable<T>[] a = observers.getAndSet(TERMINATED);
            if (a.length != 0) {
                for (InnerDisposable<T> inner : a) {
                    inner.child.onError(e);
                }
            } else {
                RxJavaPlugins.onError(e);
            }
        }
        /** Completes all children and marks this connection terminated. */
        @SuppressWarnings("unchecked")
        @Override
        public void onComplete() {
            current.compareAndSet(this, null);
            for (InnerDisposable<T> inner : observers.getAndSet(TERMINATED)) {
                inner.child.onComplete();
            }
        }
        /**
         * Atomically try adding a new InnerDisposable to this Observer or return false if this
         * Observer was terminated.
         * @param producer the producer to add
         * @return true if succeeded, false otherwise
         */
        boolean add(InnerDisposable<T> producer) {
            // the state can change so we do a CAS loop to achieve atomicity
            for (;;) {
                // get the current producer array
                InnerDisposable<T>[] c = observers.get();
                // if this subscriber-to-source reached a terminal state by receiving
                // an onError or onComplete, just refuse to add the new producer
                if (c == TERMINATED) {
                    return false;
                }
                // we perform a copy-on-write logic
                int len = c.length;
                @SuppressWarnings("unchecked")
                InnerDisposable<T>[] u = new InnerDisposable[len + 1];
                System.arraycopy(c, 0, u, 0, len);
                u[len] = producer;
                // try setting the observers array
                if (observers.compareAndSet(c, u)) {
                    return true;
                }
                // if failed, some other operation succeeded (another add, remove or termination)
                // so retry
            }
        }
        /**
         * Atomically removes the given producer from the observers array.
         * @param producer the producer to remove
         */
        @SuppressWarnings("unchecked")
        void remove(InnerDisposable<T> producer) {
            // the state can change so we do a CAS loop to achieve atomicity
            for (;;) {
                // let's read the current observers array
                InnerDisposable<T>[] c = observers.get();
                // if it is either empty or terminated, there is nothing to remove so we quit
                int len = c.length;
                if (len == 0) {
                    return;
                }
                // let's find the supplied producer in the array
                // although this is O(n), we don't expect too many child observers in general
                int j = -1;
                for (int i = 0; i < len; i++) {
                    if (c[i].equals(producer)) {
                        j = i;
                        break;
                    }
                }
                // we didn't find it so just quit
                if (j < 0) {
                    return;
                }
                // we do copy-on-write logic here
                InnerDisposable<T>[] u;
                // we don't create a new empty array if producer was the single inhabitant
                // but rather reuse an empty array
                if (len == 1) {
                    u = EMPTY;
                } else {
                    // otherwise, create a new array one less in size
                    u = new InnerDisposable[len - 1];
                    // copy elements being before the given producer
                    System.arraycopy(c, 0, u, 0, j);
                    // copy elements being after the given producer
                    System.arraycopy(c, j + 1, u, j, len - j - 1);
                }
                // try setting this new array as
                if (observers.compareAndSet(c, u)) {
                    return;
                }
                // if we failed, it means something else happened
                // (a concurrent add/remove or termination), we need to retry
            }
        }
    }
    /**
     * A Disposable that manages the request and disposed state of a
     * child Observer in thread-safe manner.
     * {@code this} holds the parent PublishObserver or itself if disposed
     * @param <T> the value type
     */
    static final class InnerDisposable<T>
    extends AtomicReference<Object>
    implements Disposable {
        private static final long serialVersionUID = -1100270633763673112L;
        /** The actual child subscriber. */
        final Observer<? super T> child;
        InnerDisposable(Observer<? super T> child) {
            this.child = child;
        }
        /** Disposed when the contained reference points back at this instance. */
        @Override
        public boolean isDisposed() {
            return get() == this;
        }
        /** Marks this child disposed and removes it from its parent, if any. */
        @SuppressWarnings("unchecked")
        @Override
        public void dispose() {
            Object o = getAndSet(this);
            if (o != null && o != this) {
                ((PublishObserver<T>)o).remove(this);
            }
        }
        /**
         * Associates this child with its parent connection, or removes it
         * immediately if the child was disposed before the parent was set.
         */
        void setParent(PublishObserver<T> p) {
            if (!compareAndSet(null, p)) {
                p.remove(this);
            }
        }
    }
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.program.database.properties;
import java.io.IOException;
import db.*;
import db.util.ErrorHandler;
import ghidra.program.database.map.AddressMap;
import ghidra.program.model.address.Address;
import ghidra.program.model.util.ObjectPropertyMap;
import ghidra.program.util.ChangeManager;
import ghidra.util.Msg;
import ghidra.util.Saveable;
import ghidra.util.classfinder.ClassTranslator;
import ghidra.util.exception.*;
import ghidra.util.prop.PropertyVisitor;
import ghidra.util.task.TaskMonitor;
/**
* Property manager that deals with properties that are of
* a Saveable Object type and store within a database table.
*/
public class ObjectPropertyMapDB extends PropertyMapDB implements ObjectPropertyMap {

    /** Class of the Saveable objects stored by this map. */
    private Class<? extends Saveable> saveableObjectClass;
    /** Schema version reported by the Saveable implementation class. */
    private int saveableObjectVersion;
    /** True if stored values may be flagged private (suppresses change events). */
    private boolean supportsPrivate;

    /**
     * Construct an Saveable object property map.
     * @param dbHandle database handle.
     * @param openMode the mode that the program was opened in.
     * @param errHandler database error handler.
     * @param changeMgr change manager for event notification
     * @param addrMap address map.
     * @param name property name.
     * @param saveableObjectClass the Saveable implementation class stored by this map.
     * @param monitor progress monitor that is only used when upgrading
     * @param supportsPrivate true if values may be private (no change events fired).
     * @throws CancelledException if the user cancels the upgrade operation.
     * @throws IOException if a database io error occurs.
     * @throws VersionException the map version is incompatible with
     * the current Saveable object class version. This will never be thrown
     * if upgrade is true.
     */
    public ObjectPropertyMapDB(DBHandle dbHandle, int openMode, ErrorHandler errHandler,
            ChangeManager changeMgr, AddressMap addrMap, String name,
            Class<? extends Saveable> saveableObjectClass, TaskMonitor monitor,
            boolean supportsPrivate) throws VersionException, CancelledException, IOException {
        super(dbHandle, errHandler, changeMgr, addrMap, name);
        this.saveableObjectClass = saveableObjectClass;
        this.supportsPrivate = supportsPrivate;
        Saveable tokenInstance = null;
        try {
            if (saveableObjectClass == GenericSaveable.class) {
                // GenericSaveable has no public default constructor; build directly
                tokenInstance = new GenericSaveable(null, null);
            }
            else {
                tokenInstance = saveableObjectClass.newInstance();
            }
        }
        catch (InstantiationException e) {
            // Preserve the cause so the underlying instantiation failure is not lost
            throw new RuntimeException(
                saveableObjectClass.getName() + " must provide public default constructor", e);
        }
        catch (IllegalAccessException e) {
            throw new RuntimeException(
                saveableObjectClass.getName() + " must provide public default constructor", e);
        }
        saveableObjectVersion = tokenInstance.getSchemaVersion();
        checkMapVersion(openMode, tokenInstance, monitor);
    }

    /**
     * Returns the class for the indicated class path name.
     * If the class can't be determined,
     * the GenericSaveable class is returned.
     * @param classPath the class path name of the desired class.
     * @return the class or a GenericSaveable.
     */
    @SuppressWarnings("unchecked")
    public static Class<? extends Saveable> getSaveableClassForName(String classPath) {
        Class<?> c = null;
        try {
            c = Class.forName(classPath);
        }
        catch (ClassNotFoundException e) {
            // Check the classNameMap.
            String newClassPath = ClassTranslator.get(classPath);
            if (newClassPath != null) {
                classPath = newClassPath;
                try {
                    c = Class.forName(newClassPath);
                }
                catch (ClassNotFoundException e1) {
                    // Since we can't get the class, at least handle it generically.
                }
            }
        }
        if (c == null) {
            Msg.error(ObjectPropertyMapDB.class, "Object property class not found: " + classPath);
        }
        else if (!Saveable.class.isAssignableFrom(c)) {
            Msg.error(ObjectPropertyMapDB.class,
                "Object property class does not implement Saveable interface: " + classPath);
        }
        // If unable to get valid Saveable class use generic implementation
        return (c != null) ? (Class<? extends Saveable>) c : GenericSaveable.class;
    }

    /**
     * Verify that the storage schema has not changed.
     * @param openMode the mode that the program was opened in.
     * @param tokenInstance token instance used to determine upgrade support.
     * @param monitor progress monitor used during an upgrade.
     * @throws VersionException if the stored schema is incompatible and an
     * upgrade is either not allowed or not possible.
     * @throws CancelledException if the user cancels an upgrade.
     * @throws IOException if a database io error occurs.
     */
    private void checkMapVersion(int openMode, Saveable tokenInstance, TaskMonitor monitor)
            throws VersionException, CancelledException, IOException {
        if (propertyTable == null) {
            // No stored records yet - nothing to verify
            return;
        }
        int schemaVersion = schema.getVersion();
        if (schemaVersion > saveableObjectVersion) {
            // A newer version was used to create the database
            Msg.warn(this,
                "Program properties utilize a newer version of: " + saveableObjectClass.getName() +
                    "(" + schemaVersion + ", " + saveableObjectVersion + ")");
            throw new VersionException(VersionException.NEWER_VERSION, false);
        }
        else if (addrMap.isUpgraded() || schemaVersion < saveableObjectVersion) {
            // An older version was used to create the database
            if (openMode != DBConstants.UPGRADE) {
                throw new VersionException(true);
            }
            if (!upgradeTable(tokenInstance, monitor)) {
                Msg.showError(this, null, "Properties Removed on Upgrade",
                    "Warning! unable to upgrade properties for " + saveableObjectClass.getName() +
                        "\nThese properties have been removed.");
            }
        }
    }

    /**
     * Attempt to upgrade the map table records to the current schema.
     * If unable to upgrade any of the map records, the table is removed.
     * @param tokenInstance token instance used to perform the record upgrades.
     * @param monitor progress monitor.
     * @return true if all records were successfully upgrade. A false
     * value indicates that one or more entries were dropped.
     * @throws CancelledException if the user cancels the upgrade.
     * @throws IOException if a database io error occurs.
     */
    private boolean upgradeTable(Saveable tokenInstance, TaskMonitor monitor)
            throws CancelledException, IOException {
        boolean allRecordsUpgraded = true;
        AddressMap oldAddressMap = addrMap.getOldAddressMap();
        // Two passes over the records: upgrade into temp db, then copy back
        monitor.initialize(propertyTable.getRecordCount() * 2);
        int count = 0;
        // Remove map table if upgrade not supported
        if (!tokenInstance.isUpgradeable(schema.getVersion())) {
            dbHandle.deleteTable(getTableName());
            propertyTable = null;
            schema = null;
            return false;
        }
        DBHandle tmpDb = new DBHandle();
        try {
            tmpDb.startTransaction();
            Schema newSchema = null;
            Table tempTable = null;
            // Upgrade map entries into temporary database
            RecordIterator iter = propertyTable.iterator();
            while (iter.hasNext()) {
                if (monitor.isCancelled()) {
                    throw new CancelledException();
                }
                DBRecord rec = iter.next();
                ObjectStorageAdapterDB oldObjStorage = new ObjectStorageAdapterDB(rec);
                ObjectStorageAdapterDB newObjStorage = new ObjectStorageAdapterDB();
                if (!tokenInstance.upgrade(oldObjStorage, schema.getVersion(), newObjStorage)) {
                    allRecordsUpgraded = false;
                    continue; // skip
                }
                if (newSchema == null) {
                    // Create table on first entry upgrade
                    newSchema = newObjStorage.getSchema(saveableObjectVersion);
                    tempTable = tmpDb.createTable(getTableName(), newSchema);
                }
                Address addr = oldAddressMap.decodeAddress(rec.getKey());
                DBRecord newRecord = newSchema.createRecord(addrMap.getKey(addr, true));
                newObjStorage.save(newRecord);
                if (tempTable != null) {
                    tempTable.putRecord(newRecord);
                }
                monitor.setProgress(++count);
            }
            // Remove old table
            dbHandle.deleteTable(getTableName());
            propertyTable = null;
            schema = null;
            if (tempTable == null) {
                // No record could be upgraded - all properties dropped
                return false;
            }
            // Create new map table
            propertyTable = dbHandle.createTable(getTableName(), newSchema);
            schema = newSchema;
            // Copy upgraded records
            iter = tempTable.iterator();
            while (iter.hasNext()) {
                if (monitor.isCancelled()) {
                    throw new CancelledException();
                }
                DBRecord rec = iter.next();
                propertyTable.putRecord(rec);
                monitor.setProgress(++count);
            }
        }
        catch (IOException e) {
            errHandler.dbError(e);
        }
        finally {
            tmpDb.close();
        }
        return allRecordsUpgraded;
    }

    @Override
    public void add(Address addr, Saveable value) {
        lock.acquire();
        try {
            if (!saveableObjectClass.isAssignableFrom(value.getClass())) {
                // Include the offending class to aid diagnosis
                throw new IllegalArgumentException("value class " + value.getClass().getName() +
                    " is not a " + saveableObjectClass.getName());
            }
            long key = addrMap.getKey(addr, true);
            Saveable oldValue = (Saveable) getObject(addr);
            String tableName = getTableName();
            Schema s;
            DBRecord rec;
            if (saveableObjectClass != GenericSaveable.class) {
                ObjectStorageAdapterDB objStorage = new ObjectStorageAdapterDB();
                value.save(objStorage);
                s = objStorage.getSchema(value.getSchemaVersion());
                checkSchema(s);
                createPropertyTable(tableName, s);
                rec = schema.createRecord(key);
                objStorage.save(rec);
            }
            else { // GenericSaveable
                GenericSaveable genericSaveable = ((GenericSaveable) value);
                DBRecord originalRec = genericSaveable.record;
                s = genericSaveable.schema;
                checkSchema(s);
                createPropertyTable(tableName, s);
                rec = originalRec.copy();
                rec.setKey(key);
            }
            propertyTable.putRecord(rec);
            cache.put(key, value);
            if (!isPrivate(value)) {
                changeMgr.setPropertyChanged(name, addr, oldValue, value);
            }
        }
        catch (IOException e) {
            errHandler.dbError(e);
        }
        finally {
            lock.release();
        }
    }

    /**
     * Returns true if private values are supported and the given value is private.
     * Private values do not generate change notifications.
     */
    private boolean isPrivate(Saveable value) {
        if (!supportsPrivate) {
            return false;
        }
        return value.isPrivate();
    }

    /**
     * Lazily creates the backing property table using the given schema.
     */
    private void createPropertyTable(String tableName, Schema s) throws IOException {
        if (propertyTable == null) {
            schema = s;
            propertyTable = dbHandle.createTable(tableName, schema);
        }
    }

    /**
     * Verifies that the given schema matches the existing table schema.
     * @throws RuntimeException if the schemas are incompatible.
     */
    private void checkSchema(Schema s) {
        if (schema != null && !schema.equals(s)) {
            throw new RuntimeException("incompatible property storage: class=" +
                saveableObjectClass.getName() + " tableName=" + getTableName());
        }
    }

    @Override
    public Class<?> getObjectClass() {
        return saveableObjectClass;
    }

    @Override
    public Object getObject(Address addr) {
        if (propertyTable == null) {
            return null;
        }
        Saveable obj = null;
        lock.acquire();
        try {
            long key = addrMap.getKey(addr, false);
            if (key == AddressMap.INVALID_ADDRESS_KEY) {
                return null;
            }
            obj = (Saveable) cache.get(key);
            if (obj != null) {
                return obj;
            }
            DBRecord rec = propertyTable.getRecord(key);
            if (rec == null) {
                return null;
            }
            ObjectStorageAdapterDB objStorage = new ObjectStorageAdapterDB(rec);
            if (saveableObjectClass == GenericSaveable.class) {
                obj = new GenericSaveable(rec, propertyTable.getSchema());
            }
            else {
                obj = saveableObjectClass.newInstance();
                obj.restore(objStorage);
            }
        }
        catch (IOException e) {
            errHandler.dbError(e);
        }
        catch (RuntimeException e) {
            // rethrow so runtime failures are not funneled into dbError below
            throw e;
        }
        catch (InstantiationException e) {
            // preserve the cause for post-mortem analysis
            errHandler.dbError(new IOException("Could not instantiate " + e.getMessage(), e));
        }
        catch (Exception e) {
            errHandler.dbError(new IOException(e.getMessage(), e));
        }
        finally {
            lock.release();
        }
        return obj;
    }

    @Override
    public void applyValue(PropertyVisitor visitor, Address addr) {
        Saveable obj = (Saveable) getObject(addr);
        if (obj != null) {
            visitor.visit(obj);
        }
    }

    // @see PropertyMapDB#getPropertyFieldClass() <- doesn't exist
    /**
     * NOTE: Custom schema is utilized - this method must never be called.
     */
    protected Class<?> getPropertyFieldClass() {
        throw new AssertException();
    }

    /**
     * Create the necessary table(s) to support this property.
     * Schema will vary depending upon Saveable object.
     */
    protected void createTable() {
        throw new AssertException();
    }

    /**
     * Attempt to upgrade the specified object map.
     * @param dbHandle database handle.
     * @param errHandler database error handler.
     * @param changeMgr change manager for event notification.
     * @param addrMap address map.
     * @param name property name.
     * @param saveableObjectClass stored object class.
     * @param version stored schema version.
     * @return upgraded map instance or null if unable to upgrade.
     */
    static ObjectPropertyMapDB upgradeMap(DBHandle dbHandle, ErrorHandler errHandler,
            ChangeManager changeMgr, AddressMap addrMap, String name, Class<?> saveableObjectClass,
            int version) {
        // TODO Fill-in stuff here....
        return null;
    }
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2017 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.trans.steps.randomvalue;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.trans.steps.randomvalue.RandomValueMeta;
import org.pentaho.di.ui.core.dialog.EnterSelectionDialog;
import org.pentaho.di.ui.core.widget.ColumnInfo;
import org.pentaho.di.ui.core.widget.TableView;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
public class RandomValueDialog extends BaseStepDialog implements StepDialogInterface {
private static Class<?> PKG = RandomValueMeta.class; // for i18n purposes, needed by Translator2!!
private Label wlStepname;
private Text wStepname;
private FormData fdlStepname, fdStepname;
private Label wlFields;
private TableView wFields;
private FormData fdlFields, fdFields;
private RandomValueMeta input;
public RandomValueDialog( Shell parent, Object in, TransMeta transMeta, String sname ) {
super( parent, (BaseStepMeta) in, transMeta, sname );
input = (RandomValueMeta) in;
}
public String open() {
Shell parent = getParent();
Display display = parent.getDisplay();
shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN );
props.setLook( shell );
setShellImage( shell, input );
ModifyListener lsMod = new ModifyListener() {
public void modifyText( ModifyEvent e ) {
input.setChanged();
}
};
changed = input.hasChanged();
FormLayout formLayout = new FormLayout();
formLayout.marginWidth = Const.FORM_MARGIN;
formLayout.marginHeight = Const.FORM_MARGIN;
shell.setLayout( formLayout );
shell.setText( BaseMessages.getString( PKG, "RandomValueDialog.DialogTitle" ) );
int middle = props.getMiddlePct();
int margin = Const.MARGIN;
// Stepname line
wlStepname = new Label( shell, SWT.RIGHT );
wlStepname.setText( BaseMessages.getString( PKG, "System.Label.StepName" ) );
props.setLook( wlStepname );
fdlStepname = new FormData();
fdlStepname.left = new FormAttachment( 0, 0 );
fdlStepname.right = new FormAttachment( middle, -margin );
fdlStepname.top = new FormAttachment( 0, margin );
wlStepname.setLayoutData( fdlStepname );
wStepname = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
wStepname.setText( stepname );
props.setLook( wStepname );
wStepname.addModifyListener( lsMod );
fdStepname = new FormData();
fdStepname.left = new FormAttachment( middle, 0 );
fdStepname.top = new FormAttachment( 0, margin );
fdStepname.right = new FormAttachment( 100, 0 );
wStepname.setLayoutData( fdStepname );
wlFields = new Label( shell, SWT.NONE );
wlFields.setText( BaseMessages.getString( PKG, "RandomValueDialog.Fields.Label" ) );
props.setLook( wlFields );
fdlFields = new FormData();
fdlFields.left = new FormAttachment( 0, 0 );
fdlFields.top = new FormAttachment( wStepname, margin );
wlFields.setLayoutData( fdlFields );
final int FieldsCols = 2;
final int FieldsRows = input.getFieldName().length;
final String[] functionDesc = new String[RandomValueMeta.functions.length - 1];
for ( int i = 1; i < RandomValueMeta.functions.length; i++ ) {
functionDesc[i - 1] = RandomValueMeta.functions[i].getDescription();
}
ColumnInfo[] colinf = new ColumnInfo[FieldsCols];
colinf[0] =
new ColumnInfo(
BaseMessages.getString( PKG, "RandomValueDialog.NameColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT,
false );
colinf[1] =
new ColumnInfo(
BaseMessages.getString( PKG, "RandomValueDialog.TypeColumn.Column" ), ColumnInfo.COLUMN_TYPE_TEXT,
false );
colinf[1].setSelectionAdapter( new SelectionAdapter() {
public void widgetSelected( SelectionEvent e ) {
EnterSelectionDialog esd = new EnterSelectionDialog( shell, functionDesc,
BaseMessages.getString( PKG, "RandomValueDialog.SelectInfoType.DialogTitle" ),
BaseMessages.getString( PKG, "RandomValueDialog.SelectInfoType.DialogMessage" ) );
String string = esd.open();
if ( string != null ) {
TableView tv = (TableView) e.widget;
tv.setText( string, e.x, e.y );
input.setChanged();
}
}
} );
wFields =
new TableView(
transMeta, shell, SWT.BORDER | SWT.FULL_SELECTION | SWT.MULTI, colinf, FieldsRows, lsMod, props );
fdFields = new FormData();
fdFields.left = new FormAttachment( 0, 0 );
fdFields.top = new FormAttachment( wlFields, margin );
fdFields.right = new FormAttachment( 100, 0 );
fdFields.bottom = new FormAttachment( 100, -50 );
wFields.setLayoutData( fdFields );
// Some buttons
wOK = new Button( shell, SWT.PUSH );
wOK.setText( BaseMessages.getString( PKG, "System.Button.OK" ) );
wCancel = new Button( shell, SWT.PUSH );
wCancel.setText( BaseMessages.getString( PKG, "System.Button.Cancel" ) );
setButtonPositions( new Button[] { wOK, wCancel }, margin, wFields );
// Add listeners
lsCancel = new Listener() {
public void handleEvent( Event e ) {
cancel();
}
};
lsOK = new Listener() {
public void handleEvent( Event e ) {
ok();
}
};
wCancel.addListener( SWT.Selection, lsCancel );
wOK.addListener( SWT.Selection, lsOK );
lsDef = new SelectionAdapter() {
public void widgetDefaultSelected( SelectionEvent e ) {
ok();
}
};
wStepname.addSelectionListener( lsDef );
// Detect X or ALT-F4 or something that kills this window...
shell.addShellListener( new ShellAdapter() {
public void shellClosed( ShellEvent e ) {
cancel();
}
} );
// Set the shell size, based upon previous time...
setSize();
getData();
input.setChanged( changed );
shell.open();
while ( !shell.isDisposed() ) {
if ( !display.readAndDispatch() ) {
display.sleep();
}
}
return stepname;
}
/**
 * Copy information from the meta-data input to the dialog fields.
 * Populates one table row per configured field (name + type description)
 * and finally gives focus back to the step-name widget.
 */
public void getData() {
  wStepname.setText( stepname );

  final String[] fieldNames = input.getFieldName();
  final int[] fieldTypes = input.getFieldType();
  for ( int row = 0; row < fieldNames.length; row++ ) {
    final TableItem tableItem = wFields.table.getItem( row );
    final String fieldName = fieldNames[row];
    final String typeDescription = RandomValueMeta.getTypeDesc( fieldTypes[row] );
    if ( fieldName != null ) {
      tableItem.setText( 1, fieldName );
    }
    if ( typeDescription != null ) {
      tableItem.setText( 2, typeDescription );
    }
  }

  wFields.setRowNums();
  wFields.optWidth( true );
  wStepname.selectAll();
  wStepname.setFocus();
}
/**
 * Discards any pending edits and closes the dialog.
 * Setting {@code stepname} to null signals to the caller that the dialog was
 * cancelled; the meta's changed flag is restored to the value it had before
 * the dialog opened.
 */
private void cancel() {
stepname = null;
input.setChanged( changed );
dispose();
}
/**
 * Validates the step name, copies the dialog's field table back into the
 * step meta-data, then closes the dialog. Does nothing (dialog stays open)
 * when the step-name text field is empty.
 */
private void ok() {
  final String enteredName = wStepname.getText();
  if ( Utils.isEmpty( enteredName ) ) {
    return;
  }
  stepname = enteredName; // return value

  final int nrFields = wFields.nrNonEmpty();
  input.allocate( nrFields );
  for ( int row = 0; row < nrFields; row++ ) {
    final TableItem tableItem = wFields.getNonEmpty( row );
    input.getFieldName()[row] = tableItem.getText( 1 );
    input.getFieldType()[row] = RandomValueMeta.getType( tableItem.getText( 2 ) );
  }
  dispose();
}
}
| |
/**
* Copyright (c) 2013-2020 Contributors to the Eclipse Foundation
*
* See the NOTICE file distributed with this work for additional information regarding copyright
* ownership. All rights reserved. This program and the accompanying materials are made available
* under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
* available at http://www.apache.org/licenses/LICENSE-2.0.txt
*/
package org.locationtech.geowave.datastore.kudu.operations;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Predicate;
import java.util.stream.Stream;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.kudu.Schema;
import org.apache.kudu.client.AsyncKuduScanner;
import org.apache.kudu.client.AsyncKuduScanner.AsyncKuduScannerBuilder;
import org.apache.kudu.client.KuduPredicate;
import org.apache.kudu.client.KuduPredicate.ComparisonOp;
import org.apache.kudu.client.KuduTable;
import org.apache.kudu.client.RowResultIterator;
import org.locationtech.geowave.core.index.ByteArrayRange;
import org.locationtech.geowave.core.index.SinglePartitionQueryRanges;
import org.locationtech.geowave.core.store.CloseableIterator;
import org.locationtech.geowave.core.store.CloseableIteratorWrapper;
import org.locationtech.geowave.core.store.entities.GeoWaveRow;
import org.locationtech.geowave.core.store.entities.GeoWaveRowIteratorTransformer;
import org.locationtech.geowave.core.store.entities.GeoWaveRowMergingIterator;
import org.locationtech.geowave.core.store.util.RowConsumer;
import org.locationtech.geowave.datastore.kudu.KuduRow;
import org.locationtech.geowave.datastore.kudu.KuduRow.KuduField;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Streams;
import com.stumbleupon.async.Callback;
import com.stumbleupon.async.Deferred;
/**
 * Executes a set of GeoWave query ranges against a Kudu table using the
 * asynchronous Kudu client, streaming matching rows through a bounded
 * blocking queue and exposing them to the caller as a {@link CloseableIterator}.
 *
 * <p>Each (partition key, sort-key range) pair becomes one {@link AsyncKuduScanner};
 * scanner callbacks push rows into the shared queue while the consumer drains it
 * via {@link RowConsumer}. A semaphore bounds how many scanners are in flight,
 * and a POISON sentinel on the queue marks end-of-results.
 */
public class KuduRangeRead<T> {
private static final Logger LOGGER = LoggerFactory.getLogger(KuduRangeRead.class);
// Maximum number of scanners allowed outstanding at once (semaphore permits).
private static final int MAX_CONCURRENT_READ = 100;
// Capacity of the result queue; producer callbacks block once it is full.
private static final int MAX_BOUNDED_READS_ENQUEUED = 1000000;
private final Collection<SinglePartitionQueryRanges> ranges;
private final Schema schema;
private final short[] adapterIds;
private final KuduTable table;
private final KuduOperations operations;
// When true, rows are additionally passed through the visibility filter below.
private final boolean visibilityEnabled;
private final Predicate<GeoWaveRow> filter;
private final GeoWaveRowIteratorTransformer<T> rowTransformer;
// When true, adjacent rows for the same entry are merged before transformation.
private final boolean rowMerging;
// only allow so many outstanding async reads or writes, use this semaphore
// to control it
private final Semaphore readSemaphore = new Semaphore(MAX_CONCURRENT_READ);
/**
 * @param ranges the partition/sort-key ranges to scan; null or empty means full-table scan
 * @param adapterIds adapter (type) ids used to build the IN-list predicate
 * @param table the Kudu table to read
 * @param operations factory for async scanner builders
 * @param visibilityEnabled whether to apply {@code filter} to each row
 * @param filter visibility predicate applied only when visibilityEnabled is true
 * @param rowTransformer converts raw GeoWave rows into the caller's result type
 * @param rowMerging whether to merge rows via {@link GeoWaveRowMergingIterator}
 */
protected KuduRangeRead(
final Collection<SinglePartitionQueryRanges> ranges,
final short[] adapterIds,
final KuduTable table,
final KuduOperations operations,
final boolean visibilityEnabled,
final Predicate<GeoWaveRow> filter,
final GeoWaveRowIteratorTransformer<T> rowTransformer,
final boolean rowMerging) {
this.ranges = ranges;
this.adapterIds = adapterIds;
this.table = table;
// Schema is derived from the table rather than passed in, keeping the two consistent.
this.schema = table.getSchema();
this.operations = operations;
this.visibilityEnabled = visibilityEnabled;
this.filter = filter;
this.rowTransformer = rowTransformer;
this.rowMerging = rowMerging;
}
/**
 * Builds one async scanner per (partition, sort-key range) combination — or a
 * single scanner filtered only by adapter id when no ranges were given — and
 * kicks off asynchronous execution of all of them.
 *
 * @return a closeable iterator over the transformed query results
 */
public CloseableIterator<T> results() {
final List<AsyncKuduScanner> scanners = new ArrayList<>();
// Adapter-id predicate is shared by every scanner built below.
final KuduPredicate adapterIdPred =
KuduPredicate.newInListPredicate(
schema.getColumn(KuduField.GW_ADAPTER_ID_KEY.getFieldName()),
Arrays.asList(ArrayUtils.toObject(adapterIds)));
if ((ranges != null) && !ranges.isEmpty()) {
for (final SinglePartitionQueryRanges r : ranges) {
byte[] partitionKey = r.getPartitionKey();
// A null partition key is normalized to the empty byte array (the "default" partition).
if (partitionKey == null) {
partitionKey = new byte[0];
}
final KuduPredicate partitionPred =
KuduPredicate.newComparisonPredicate(
schema.getColumn(KuduField.GW_PARTITION_ID_KEY.getFieldName()),
ComparisonOp.EQUAL,
partitionKey);
for (final ByteArrayRange range : r.getSortKeyRanges()) {
// Missing start bound -> scan from the lowest possible sort key.
final byte[] start = range.getStart() != null ? range.getStart() : new byte[0];
// Missing end bound -> use a 7-byte 0xFF sentinel as the exclusive upper bound.
// NOTE(review): this presumes sort keys never compare above seven 0xFF bytes —
// verify against the sort-key encoding used by this store.
final byte[] end =
range.getEnd() != null ? range.getEndAsNextPrefix()
: new byte[] {
(byte) 0xFF,
(byte) 0xFF,
(byte) 0xFF,
(byte) 0xFF,
(byte) 0xFF,
(byte) 0xFF,
(byte) 0xFF};
final KuduPredicate lowerPred =
KuduPredicate.newComparisonPredicate(
schema.getColumn(KuduField.GW_SORT_KEY.getFieldName()),
ComparisonOp.GREATER_EQUAL,
start);
final KuduPredicate upperPred =
KuduPredicate.newComparisonPredicate(
schema.getColumn(KuduField.GW_SORT_KEY.getFieldName()),
ComparisonOp.LESS,
end);
final AsyncKuduScannerBuilder scannerBuilder = operations.getAsyncScannerBuilder(table);
final AsyncKuduScanner scanner =
scannerBuilder.addPredicate(adapterIdPred).addPredicate(partitionPred).addPredicate(
lowerPred).addPredicate(upperPred).build();
scanners.add(scanner);
}
}
} else {
// No ranges: scan the whole table, constrained only by adapter id.
final AsyncKuduScannerBuilder scannerBuilder = operations.getAsyncScannerBuilder(table);
final AsyncKuduScanner scanner = scannerBuilder.addPredicate(adapterIdPred).build();
scanners.add(scanner);
}
return executeQueryAsync(scanners);
}
/**
 * Launches all scanners from a dedicated submitter thread and returns immediately
 * with an iterator backed by the shared result queue.
 *
 * <p>{@code queryCount} starts at 1 so the submitter loop itself holds a "reference";
 * it is decremented once submission finishes, guaranteeing the POISON end-of-stream
 * marker is enqueued exactly once — even when there were zero scanners.
 * Closing the returned iterator sets {@code isCanceled}, which makes in-flight
 * callbacks finalize early.
 */
public CloseableIterator<T> executeQueryAsync(final List<AsyncKuduScanner> scanners) {
final BlockingQueue<Object> results = new LinkedBlockingQueue<>(MAX_BOUNDED_READS_ENQUEUED);
final AtomicBoolean isCanceled = new AtomicBoolean(false);
new Thread(() -> {
final AtomicInteger queryCount = new AtomicInteger(1);
for (final AsyncKuduScanner scanner : scanners) {
try {
// Block here until a read permit is available; executeScanner releases it on finish.
readSemaphore.acquire();
executeScanner(
scanner,
readSemaphore,
results,
queryCount,
isCanceled,
visibilityEnabled,
filter,
rowTransformer,
rowMerging);
} catch (final InterruptedException e) {
LOGGER.warn("Exception while executing query", e);
// Give back the permit that executeScanner would otherwise have released.
readSemaphore.release();
}
}
// then decrement
if (queryCount.decrementAndGet() <= 0) {
// and if there are no queries, there may not have been any statements submitted
try {
results.put(RowConsumer.POISON);
} catch (final InterruptedException e) {
LOGGER.error("Interrupted while finishing blocking queue, this may result in deadlock!");
}
}
}, "Kudu Query Executor").start();
return new CloseableIteratorWrapper<T>(() -> isCanceled.set(true), new RowConsumer(results));
}
/**
 * Finalizes one scanner: closes it, releases its read permit, and — if this was
 * the last outstanding query — enqueues the POISON marker so the consumer stops.
 */
public void checkFinalize(
final AsyncKuduScanner scanner,
final Semaphore semaphore,
final BlockingQueue<Object> resultQueue,
final AtomicInteger queryCount) {
scanner.close();
semaphore.release();
if (queryCount.decrementAndGet() <= 0) {
try {
resultQueue.put(RowConsumer.POISON);
} catch (final InterruptedException e) {
LOGGER.error("Interrupted while finishing blocking queue, this may result in deadlock!");
}
}
}
/**
 * Drives one scanner asynchronously. A callback re-chains itself via
 * {@code addCallbackDeferring(this)} while the scanner has more rows; each batch
 * is optionally visibility-filtered, optionally merged, transformed, and pushed
 * onto the result queue. The errback logs and finalizes on failure.
 *
 * @return the deferred for the first {@code nextRows()} round-trip
 */
public Deferred<Object> executeScanner(
final AsyncKuduScanner scanner,
final Semaphore semaphore,
final BlockingQueue<Object> resultQueue,
final AtomicInteger queryCount,
final AtomicBoolean isCanceled,
final boolean visibilityEnabled,
final Predicate<GeoWaveRow> filter,
final GeoWaveRowIteratorTransformer<T> rowTransformer,
final boolean rowMerging) {
// Errback class
class QueryErrback implements Callback<Deferred<Object>, Exception> {
@Override
public Deferred<Object> call(final Exception e) {
LOGGER.warn("While scanning rows from kudu", e);
checkFinalize(scanner, semaphore, resultQueue, queryCount);
return Deferred.fromError(e);
}
}
final QueryErrback errBack = new QueryErrback();
// callback class
class QueryCallback implements Callback<Deferred<Object>, RowResultIterator> {
@Override
public Deferred<Object> call(final RowResultIterator rs) {
// A null batch or an external cancel ends this scanner's chain.
if ((rs == null) || isCanceled.get()) {
checkFinalize(scanner, semaphore, resultQueue, queryCount);
return Deferred.fromResult(null);
}
if (rs.getNumRows() > 0) {
Stream<GeoWaveRow> tmpStream = Streams.stream(rs.iterator()).map(KuduRow::new);
if (visibilityEnabled) {
tmpStream = tmpStream.filter(filter);
}
final Iterator<GeoWaveRow> tmpIterator = tmpStream.iterator();
rowTransformer.apply(
rowMerging ? new GeoWaveRowMergingIterator(tmpIterator)
: tmpIterator).forEachRemaining(row -> {
try {
// Blocks when the bounded queue is full — back-pressure on the producer side.
resultQueue.put(row);
} catch (final InterruptedException e) {
LOGGER.warn("interrupted while waiting to enqueue a kudu result", e);
}
});
}
if (scanner.hasMoreRows()) {
// Re-chain this same callback for the next batch.
return scanner.nextRows().addCallbackDeferring(this).addErrback(errBack);
}
checkFinalize(scanner, semaphore, resultQueue, queryCount);
return Deferred.fromResult(null);
}
}
// Register this scanner as an outstanding query before the first async round-trip.
queryCount.incrementAndGet();
return scanner.nextRows().addCallbackDeferring(new QueryCallback()).addErrback(errBack);
}
}
| |
/*
* Copyright 2014 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.jstype.EnumElementType;
import com.google.javascript.rhino.jstype.FunctionType;
import com.google.javascript.rhino.jstype.JSType;
import com.google.javascript.rhino.jstype.NamedType;
import com.google.javascript.rhino.jstype.NoType;
import com.google.javascript.rhino.jstype.ObjectType;
import com.google.javascript.rhino.jstype.ProxyObjectType;
import com.google.javascript.rhino.jstype.TemplateType;
import com.google.javascript.rhino.jstype.TemplatizedType;
import com.google.javascript.rhino.jstype.UnionType;
import com.google.javascript.rhino.jstype.Visitor;
import java.util.LinkedHashSet;
import java.util.Set;
/**
* Gathers property names defined in externs.
*/
class GatherExternProperties extends AbstractPostOrderCallback
implements CompilerPass {
// Insertion-ordered so the reported extern-property set is deterministic.
private final Set<String> externProperties = new LinkedHashSet<>();
private final AbstractCompiler compiler;
private final ExtractRecordTypePropertyNames typeVisitor =
new ExtractRecordTypePropertyNames();
public GatherExternProperties(AbstractCompiler compiler) {
this.compiler = compiler;
}
/**
 * Traverses only the externs tree (the {@code root} of regular sources is
 * intentionally ignored) and publishes the gathered property names on the compiler.
 */
@Override
public void process(Node externs, Node root) {
NodeTraversal.traverseEs6(compiler, externs, this);
compiler.setExternProperties(ImmutableSet.copyOf(externProperties));
}
/**
 * Collects property names from three places on each externs node:
 * GETPROP / OBJECTLIT syntax, the node's inferred JSType, and any @typedef
 * attached to a qualified name. Node types not handled by the switch are
 * simply skipped (no default case needed).
 */
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
switch (n.getType()) {
case GETPROP:
// Gathers "name" from (someObject.name).
Node dest = n.getSecondChild();
if (dest.isString()) {
externProperties.add(dest.getString());
}
break;
case OBJECTLIT:
// Gathers "name" and "address" from ({name: null, address: null}).
for (Node child = n.getFirstChild();
child != null;
child = child.getNext()) {
externProperties.add(child.getString());
}
break;
}
// Gather field names from the type of the node (if any).
JSType type = n.getJSType();
if (type != null) {
typeVisitor.visitOnce(type);
}
// Gather field names from the @typedef declaration.
// Typedefs are declared on qualified name nodes.
if (n.isQualifiedName()) {
// Get the JSDoc for the current node and check if it contains a
// typedef.
JSDocInfo jsDoc = NodeUtil.getBestJSDocInfo(n);
if (jsDoc != null && jsDoc.hasTypedefType()) {
// Get the corresponding type by looking at the type registry.
JSType typedefType = compiler.getTypeIRegistry().getType(n.getQualifiedName());
if (typedefType != null) {
typeVisitor.visitOnce(typedefType);
}
}
}
}
/**
 * Type visitor that walks a JSType graph and adds the property names of any
 * record types (defined in externs) to {@link #externProperties}. Every case
 * method returns that same shared set. Non-static so it can write directly
 * into the enclosing pass's state.
 */
private class ExtractRecordTypePropertyNames
implements Visitor<Set<String>> {
// Identity-based so structurally-equal but distinct type objects are each visited.
private final Set<JSType> seenTypes = Sets.newIdentityHashSet();
// Entry point: visit a type at most once, guarding against cycles in the type graph.
public void visitOnce(JSType type) {
// Handle recursive types by only ever visiting the same type once.
if (seenTypes.add(type)) {
type.visit(this);
}
}
// Interesting cases first, no-ops later.
@Override
public Set<String> caseEnumElementType(EnumElementType type) {
// Descend into the enum's element type.
// @enum {T}
visitOnce(type.getPrimitiveType());
return externProperties;
}
@Override
public Set<String> caseFunctionType(FunctionType type) {
// Visit parameter types.
// function(T1, T2), as well as @param {T}
for (Node param : type.getParameters()) {
visitOnce(param.getJSType());
}
// Visit the return type.
// function(): T, as well as @return {T}
visitOnce(type.getReturnType());
// @interface
if (type.isInterface()) {
// Visit the extended interfaces.
// @extends {T}
for (JSType extendedType : type.getExtendedInterfaces()) {
visitOnce(extendedType);
}
}
// @constructor
if (type.isConstructor()) {
// Visit the implemented interfaces.
// @implements {T}
for (JSType implementedType : type.getOwnImplementedInterfaces()) {
visitOnce(implementedType);
}
// Visit the parent class (if any).
// @extends {T}
JSType superClass = type.getPrototype().getImplicitPrototype();
if (superClass != null) {
visitOnce(superClass);
}
}
return externProperties;
}
@Override
public Set<String> caseObjectType(ObjectType type) {
// Record types.
// {a: T1, b: T2}.
if (type.isRecordType()) {
for (String propertyName : type.getOwnPropertyNames()) {
// After type inference it is possible that some nodes in externs
// can have types which are defined in non-extern code. To avoid
// bleeding property names of such types into externs we check that
// the node for each property was defined in externs.
if (type.getPropertyNode(propertyName).isFromExterns()) {
externProperties.add(propertyName);
visitOnce(type.getPropertyType(propertyName));
}
}
}
return externProperties;
}
@Override
public Set<String> caseNamedType(NamedType type) {
// Treat as all other proxy objects.
return caseProxyObjectType(type);
}
@Override
public Set<String> caseProxyObjectType(ProxyObjectType type) {
// Visit the proxied type.
// @typedef {T}
type.visitReferenceType(this);
return externProperties;
}
@Override
public Set<String> caseUnionType(UnionType type) {
// Visit the alternatives.
// T1|T2|T3
for (JSType alternateType : type.getAlternates()) {
visitOnce(alternateType);
}
return externProperties;
}
@Override
public Set<String> caseTemplatizedType(TemplatizedType type) {
// Visit the type arguments.
// SomeType.<T1, T2>
for (JSType templateType : type.getTemplateTypes()) {
visitOnce(templateType);
}
return externProperties;
}
// The remaining cases are leaf/primitive types with no properties to gather;
// each simply returns the shared accumulator unchanged.
@Override
public Set<String> caseNoType(NoType type) {
return externProperties;
}
@Override
public Set<String> caseAllType() {
return externProperties;
}
@Override
public Set<String> caseBooleanType() {
return externProperties;
}
@Override
public Set<String> caseNoObjectType() {
return externProperties;
}
@Override
public Set<String> caseUnknownType() {
return externProperties;
}
@Override
public Set<String> caseNullType() {
return externProperties;
}
@Override
public Set<String> caseNumberType() {
return externProperties;
}
@Override
public Set<String> caseStringType() {
return externProperties;
}
@Override
public Set<String> caseVoidType() {
return externProperties;
}
@Override
public Set<String> caseTemplateType(TemplateType templateType) {
return externProperties;
}
}
}
| |
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.itachi1706.droideggs.QEgg;
import android.animation.ObjectAnimator;
import android.animation.TimeAnimator;
import android.content.ActivityNotFoundException;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.res.ColorStateList;
import android.graphics.Bitmap;
import android.graphics.BitmapShader;
import android.graphics.Canvas;
import android.graphics.ColorFilter;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PixelFormat;
import android.graphics.Shader;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.HapticFeedbackConstants;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import androidx.appcompat.app.AppCompatActivity;
import com.google.android.material.snackbar.Snackbar;
import com.itachi1706.droideggs.PlatLogoCommon;
import com.itachi1706.droideggs.QEgg.EasterEgg.quares.QuaresActivity;
import com.itachi1706.droideggs.R;
/**
* Created by Kenneth on 14/12/2019.
* for com.itachi1706.droideggs.QEgg in DroidEggs
*/
public class PlatLogoActivityQ extends AppCompatActivity {
// The draggable "0" and "1" glyphs that together form "Q" when overlapped.
ImageView mZeroView, mOneView;
// Animated backslash-pattern background; animates while the glyphs overlap.
BackslashDrawable mBackslash;
// Number of successful overlap gestures; reaching 7 launches the next stage.
int mClicks;
// Shared stroke paint for both glyph drawables (color is set per-draw).
static final Paint sPaint = new Paint();
static {
sPaint.setStyle(Paint.Style.STROKE);
sPaint.setStrokeWidth(4f);
sPaint.setStrokeCap(Paint.Cap.SQUARE);
}
@Override
protected void onPause() {
// Stop the background animation and reset progress whenever the activity leaves the foreground.
if (mBackslash != null) {
mBackslash.stopAnimating();
}
mClicks = 0;
super.onPause();
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
final float dp = getResources().getDisplayMetrics().density;
// Draw edge-to-edge behind the system bars.
getWindow().getDecorView().setSystemUiVisibility(
View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_LAYOUT_STABLE);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
getWindow().setNavigationBarColor(0);
getWindow().setStatusBarColor(0);
}
if (getActionBar() != null) getActionBar().hide();
else if (getSupportActionBar() != null) getSupportActionBar().hide();
setContentView(R.layout.q_platlogo_layout);
mBackslash = new BackslashDrawable((int) (50 * dp));
mOneView = findViewById(R.id.one);
mOneView.setImageDrawable(new OneDrawable());
mZeroView = findViewById(R.id.zero);
mZeroView.setImageDrawable(new ZeroDrawable());
final ViewGroup root = (ViewGroup) mOneView.getParent();
// Children may be dragged outside their bounds, so clipping must be off.
root.setClipChildren(false);
root.setBackground(mBackslash);
root.getBackground().setAlpha(0x20);
// One shared touch listener implements drag, double-tap-to-spin, and overlap detection.
View.OnTouchListener tl = new View.OnTouchListener() {
float mOffsetX, mOffsetY;
long mClickTime;
ObjectAnimator mRotAnim;
@Override
public boolean onTouch(View v, MotionEvent event) {
PlatLogoCommon.measureTouchPressure(event);
switch (event.getActionMasked()) {
case MotionEvent.ACTION_DOWN:
v.animate().scaleX(1.1f).scaleY(1.1f);
v.getParent().bringChildToFront(v);
mOffsetX = event.getRawX() - v.getX();
mOffsetY = event.getRawY() - v.getY();
long now = System.currentTimeMillis();
// A second tap within 350ms starts a long slow spin (10 full turns over 10s).
if (now - mClickTime < 350) {
mRotAnim = ObjectAnimator.ofFloat(v, View.ROTATION,
v.getRotation(), v.getRotation() + 3600);
mRotAnim.setDuration(10000);
mRotAnim.start();
mClickTime = 0;
} else {
mClickTime = now;
}
break;
case MotionEvent.ACTION_MOVE:
// Drag: keep the finger's original offset within the view.
v.setX(event.getRawX() - mOffsetX);
v.setY(event.getRawY() - mOffsetY);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1) {
v.performHapticFeedback(HapticFeedbackConstants.TEXT_HANDLE_MOVE);
}
break;
case MotionEvent.ACTION_UP:
v.performClick();
// fall through
case MotionEvent.ACTION_CANCEL:
v.animate().scaleX(1f).scaleY(1f);
if (mRotAnim != null) mRotAnim.cancel();
testOverlap();
break;
}
return true;
}
};
findViewById(R.id.one).setOnTouchListener(tl);
findViewById(R.id.zero).setOnTouchListener(tl);
findViewById(R.id.text).setOnTouchListener(tl);
}
// Checks whether the "1" sits close enough to the "0" at roughly 315 degrees;
// if so, snaps it into place, animates the background, and counts progress.
private void testOverlap() {
final float width = mZeroView.getWidth();
final float targetX = mZeroView.getX() + width * .2f;
final float targetY = mZeroView.getY() + width * .3f;
if (Math.hypot(targetX - mOneView.getX(), targetY - mOneView.getY()) < width * .2f
&& Math.abs(mOneView.getRotation() % 360 - 315) < 15) {
// Snap position and rotation to the exact target.
mOneView.animate().x(mZeroView.getX() + width * .2f);
mOneView.animate().y(mZeroView.getY() + width * .3f);
mOneView.setRotation(mOneView.getRotation() % 360);
mOneView.animate().rotation(315);
//mOneView.performHapticFeedback(HapticFeedbackConstants.CONFIRM);
mOneView.performHapticFeedback(HapticFeedbackConstants.KEYBOARD_TAP); // CONFIRM is hidden API
mBackslash.startAnimating();
mClicks++;
if (mClicks >= 7) {
launchNextStage();
}
} else {
mBackslash.stopAnimating();
}
}
// Records first-unlock time and launches the Quares mini-game (requires API 23+).
private void launchNextStage() {
SharedPreferences pref = PreferenceManager.getDefaultSharedPreferences(getApplicationContext());
if (pref.getLong("Q_EGG_MODE", 0) == 0) {
// For posterity: the moment this user unlocked the easter egg
pref.edit().putLong("Q_EGG_MODE", System.currentTimeMillis()).apply();
}
try {
// MAKE SURE YOU ARE AT LEAST MARSHMALLOW (API 23)
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
Intent q = new Intent(PlatLogoActivityQ.this, QuaresActivity.class).setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK);
startActivity(q);
}
else {
Snackbar.make(findViewById(android.R.id.content), "Your version of Android is too low to advance further. Requires Android 6.0 Marshmallow to advance", Snackbar.LENGTH_LONG).show();
// Early return keeps this activity open (and skips finish()) when the device is too old.
return;
}
} catch (ActivityNotFoundException ex) {
Log.e("PlatLogoActivity", "No more eggs.");
}
finish();
}
static final String TOUCH_STATS = "touch.stats";
@Override
public void onStart() {
super.onStart();
PlatLogoCommon.syncTouchPressure(TOUCH_STATS, getApplicationContext());
}
@Override
public void onStop() {
PlatLogoCommon.syncTouchPressure(TOUCH_STATS, getApplicationContext());
super.onStop();
}
// Draws the "0" glyph: a stroked circle in a 24x24 logical grid, scaled to the canvas.
static class ZeroDrawable extends Drawable {
int mTintColor;
@Override
public void draw(Canvas canvas) {
// Force full opacity; tint supplies only the RGB channels.
sPaint.setColor(mTintColor | 0xFF000000);
canvas.save();
canvas.scale(canvas.getWidth() / 24f, canvas.getHeight() / 24f);
canvas.drawCircle(12f, 12f, 10f, sPaint);
canvas.restore();
}
@Override
public void setAlpha(int alpha) { }
@Override
public void setColorFilter(ColorFilter colorFilter) { }
@Override
public void setTintList(ColorStateList tint) {
mTintColor = tint.getDefaultColor();
}
@Override
public int getOpacity() {
return PixelFormat.TRANSLUCENT;
}
}
// Draws the "1" glyph: a stroked path in the same 24x24 logical grid.
static class OneDrawable extends Drawable {
int mTintColor;
@Override
public void draw(Canvas canvas) {
sPaint.setColor(mTintColor | 0xFF000000);
canvas.save();
canvas.scale(canvas.getWidth() / 24f, canvas.getHeight() / 24f);
final Path p = new Path();
p.moveTo(12f, 21.83f);
p.rLineTo(0f, -19.67f);
p.rLineTo(-5f, 0f);
canvas.drawPath(p, sPaint);
canvas.restore();
}
@Override
public void setAlpha(int alpha) { }
@Override
public void setColorFilter(ColorFilter colorFilter) { }
@Override
public void setTintList(ColorStateList tint) {
mTintColor = tint.getDefaultColor();
}
@Override
public int getOpacity() {
return PixelFormat.TRANSLUCENT;
}
}
// Scrolling background of repeated backslash tiles, shifted right over time
// by translating the shader's local matrix on each animation tick.
private static class BackslashDrawable extends Drawable implements TimeAnimator.TimeListener {
Bitmap mTile;
Paint mPaint = new Paint();
BitmapShader mShader;
TimeAnimator mAnimator = new TimeAnimator();
Matrix mMatrix = new Matrix();
public void draw(Canvas canvas) {
// Fill the whole bounds with the (possibly translated) tiled shader.
canvas.drawPaint(mPaint);
}
BackslashDrawable(int width) {
// NOTE(review): "width + 1 - 1" is just "width" — square tile; the
// round-trip looks like a leftover edit and could be simplified.
int height = width + 1 - 1;
mTile = Bitmap.createBitmap(width, height, Bitmap.Config.ALPHA_8);
mAnimator.setTimeListener(this);
// Render one backslash tile: two filled half-stripes forming a diagonal band.
final Canvas tileCanvas = new Canvas(mTile);
final float w = tileCanvas.getWidth();
final float h = tileCanvas.getHeight();
final Path path = new Path();
path.moveTo(0, 0);
path.lineTo(w / 2, 0);
path.lineTo(w, h / 2);
path.lineTo(w, h);
path.close();
path.moveTo(0, h / 2);
path.lineTo(w / 2, h);
path.lineTo(0, h);
path.close();
final Paint slashPaint = new Paint();
slashPaint.setAntiAlias(true);
slashPaint.setStyle(Paint.Style.FILL);
slashPaint.setColor(0xFF000000);
tileCanvas.drawPath(path, slashPaint);
//mPaint.setColor(0xFF0000FF);
mShader = new BitmapShader(mTile, Shader.TileMode.REPEAT, Shader.TileMode.REPEAT);
mPaint.setShader(mShader);
}
public void startAnimating() {
if (!mAnimator.isStarted()) {
mAnimator.start();
}
}
public void stopAnimating() {
if (mAnimator.isStarted()) {
mAnimator.cancel();
}
}
@Override
public void setAlpha(int alpha) {
mPaint.setAlpha(alpha);
}
@Override
public void setColorFilter(ColorFilter colorFilter) {
mPaint.setColorFilter(colorFilter);
}
@Override
public int getOpacity() {
return PixelFormat.TRANSLUCENT;
}
@Override
public void onTimeUpdate(TimeAnimator animation, long totalTime, long deltaTime) {
if (mShader != null) {
// Scroll the pattern horizontally at 0.25 px per millisecond.
mMatrix.postTranslate(deltaTime / 4f, 0);
mShader.setLocalMatrix(mMatrix);
invalidateSelf();
}
}
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: BceGhostMoveStop.proto
package com.xinqihd.sns.gameserver.proto;
public final class XinqiBceGhostMoveStop {
private XinqiBceGhostMoveStop() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
public interface BceGhostMoveStopOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required int32 x = 1;
boolean hasX();
int getX();
// required int32 y = 2;
boolean hasY();
int getY();
}
public static final class BceGhostMoveStop extends
com.google.protobuf.GeneratedMessage
implements BceGhostMoveStopOrBuilder {
// Use BceGhostMoveStop.newBuilder() to construct.
private BceGhostMoveStop(Builder builder) {
super(builder);
}
private BceGhostMoveStop(boolean noInit) {}
private static final BceGhostMoveStop defaultInstance;
public static BceGhostMoveStop getDefaultInstance() {
return defaultInstance;
}
public BceGhostMoveStop getDefaultInstanceForType() {
return defaultInstance;
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.internal_static_com_xinqihd_sns_gameserver_proto_BceGhostMoveStop_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.internal_static_com_xinqihd_sns_gameserver_proto_BceGhostMoveStop_fieldAccessorTable;
}
private int bitField0_;
// required int32 x = 1;
public static final int X_FIELD_NUMBER = 1;
private int x_;
public boolean hasX() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
public int getX() {
return x_;
}
// required int32 y = 2;
public static final int Y_FIELD_NUMBER = 2;
private int y_;
public boolean hasY() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
public int getY() {
return y_;
}
private void initFields() {
x_ = 0;
y_ = 0;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasX()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasY()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeInt32(1, x_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeInt32(2, y_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(1, x_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(2, y_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
public static com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
// Generated by the protocol buffer compiler — do not edit by hand.
// Static parse entry points for BceGhostMoveStop; each delegates to a fresh
// Builder and materializes via buildParsed(), which reports missing required
// fields (x, y) as InvalidProtocolBufferException.
public static com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
// Delimited variants read a varint length prefix first; they return null on
// clean end-of-stream (mergeDelimitedFrom returning false) instead of throwing.
public static com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
// Generated builder factory methods — do not edit by hand.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
// Returns a new builder pre-populated with the given message's set fields.
public static Builder newBuilder(com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Generated builder for {@code BceGhostMoveStop} (protoc 2.x GeneratedMessage
 * style). The message has two required int32 fields: x (tag 1) and y (tag 2);
 * field presence is tracked in {@code bitField0_}. Generated code — do not
 * edit by hand.
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStopOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.internal_static_com_xinqihd_sns_gameserver_proto_BceGhostMoveStop_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.internal_static_com_xinqihd_sns_gameserver_proto_BceGhostMoveStop_fieldAccessorTable;
}
// Construct using com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No nested-message fields here, so there are no field builders to set up.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
// Resets both fields to 0 and clears their has-bits.
public Builder clear() {
super.clear();
x_ = 0;
bitField0_ = (bitField0_ & ~0x00000001);
y_ = 0;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop.getDescriptor();
}
public com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop getDefaultInstanceForType() {
return com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop.getDefaultInstance();
}
// Builds the message, throwing (unchecked) if a required field is unset.
public com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop build() {
com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Like build(), but reports missing required fields as a checked
// InvalidProtocolBufferException; used by the static parseFrom() methods.
private com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
// Copies builder state into a new message without validating required fields.
public com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop buildPartial() {
com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop result = new com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.x_ = x_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.y_ = y_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop) {
return mergeFrom((com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: fields set in `other` overwrite, unset fields are kept.
public Builder mergeFrom(com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop other) {
if (other == com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop.getDefaultInstance()) return this;
if (other.hasX()) {
setX(other.getX());
}
if (other.hasY()) {
setY(other.getY());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Both x and y are required fields.
public final boolean isInitialized() {
if (!hasX()) {
return false;
}
if (!hasY()) {
return false;
}
return true;
}
// Wire-format parse loop. Tag 8 = field 1 (x, varint); tag 16 = field 2
// (y, varint); tag 0 = end of message; anything else goes to unknown fields.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
x_ = input.readInt32();
break;
}
case 16: {
bitField0_ |= 0x00000002;
y_ = input.readInt32();
break;
}
}
}
}
private int bitField0_;
// required int32 x = 1;
private int x_ ;
public boolean hasX() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
public int getX() {
return x_;
}
public Builder setX(int value) {
bitField0_ |= 0x00000001;
x_ = value;
onChanged();
return this;
}
public Builder clearX() {
bitField0_ = (bitField0_ & ~0x00000001);
x_ = 0;
onChanged();
return this;
}
// required int32 y = 2;
private int y_ ;
public boolean hasY() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
public int getY() {
return y_;
}
public Builder setY(int value) {
bitField0_ |= 0x00000002;
y_ = value;
onChanged();
return this;
}
public Builder clearY() {
bitField0_ = (bitField0_ & ~0x00000002);
y_ = 0;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:com.xinqihd.sns.gameserver.proto.BceGhostMoveStop)
}
// Eagerly create the shared default instance returned by getDefaultInstance().
static {
defaultInstance = new BceGhostMoveStop(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:com.xinqihd.sns.gameserver.proto.BceGhostMoveStop)
}
// Descriptor plumbing for the generated message type; the references below
// are assigned by the static initializer. Generated code — do not edit.
private static com.google.protobuf.Descriptors.Descriptor
internal_static_com_xinqihd_sns_gameserver_proto_BceGhostMoveStop_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_com_xinqihd_sns_gameserver_proto_BceGhostMoveStop_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
// Serialized form of BceGhostMoveStop.proto: one message with two
// required int32 fields, x = 1 and y = 2.
java.lang.String[] descriptorData = {
"\n\026BceGhostMoveStop.proto\022 com.xinqihd.sn" +
"s.gameserver.proto\"(\n\020BceGhostMoveStop\022\t" +
"\n\001x\030\001 \002(\005\022\t\n\001y\030\002 \002(\005B\027B\025XinqiBceGhostMov" +
"eStop"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_com_xinqihd_sns_gameserver_proto_BceGhostMoveStop_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_com_xinqihd_sns_gameserver_proto_BceGhostMoveStop_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_com_xinqihd_sns_gameserver_proto_BceGhostMoveStop_descriptor,
new java.lang.String[] { "X", "Y", },
com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop.class,
com.xinqihd.sns.gameserver.proto.XinqiBceGhostMoveStop.BceGhostMoveStop.Builder.class);
return null;
}
};
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
// @@protoc_insertion_point(outer_class_scope)
}
| |
import processing.core.*; import java.io.*; import processing.net.*; import netscape.javascript.*; import java.applet.*; import java.awt.*; import java.awt.image.*; import java.awt.event.*; import java.io.*; import java.net.*; import java.text.*; import java.util.*; import java.util.zip.*; public class arcflow_home extends PApplet {
PFont font; // current text font; swapped by Label.render(), restored in draw()
nanoxml.XMLElement xml,xmlt; // parsed flow data / (intended) per-year sums data
int frame,lengthAnim = 50; // animation frame counter and arc grow-in length
int colors; // NOTE(review): never assigned or read in this file — confirm before removing
Object[] nodes; // child nodes of the most recently parsed XML document
MapSystem ls; // owns all periods, timeline circles and hover labels
PImage b; // world-map background image
/**
 * One-time sketch initialization: sizes the canvas, loads fonts and the
 * world-map image, parses the arms-flow XML (per-period agent->recipient
 * arcs) and the per-year sums XML, and populates the MapSystem.
 */
public void setup(){
frameRate(20);
size(1000,640,JAVA2D);
background(0xffeeeeee);
noFill();
font = createFont("LucidaGrande-10.vlw",10);
textFont(font, 10);
fill(0xffcc4444);
noStroke();
smooth();
frame = 0;
ls = new MapSystem();
b = loadImage("BlankMap-World.png");
xml = new nanoxml.XMLElement();
String xmlString="";
String[] lines;
// When running as an applet the data URL comes from an applet parameter;
// offline, fall back to the bundled file.
if (online) {
lines=loadStrings(param("flowbase"));
} else {
lines=loadStrings("data/full-compressed.xml");
}
for(int i=0; i<lines.length; i++) xmlString+=lines[i];
xml.parseString(xmlString);
// retrieve all childnodes and store them in an array
nodes = xml.getChildren().toArray();
String title = (String)xml.getAttribute("title");
String countries = title+": ";
//Iterate through periods
Vector periodVector=xml.getChildrenForPath("timeseries/period");
// turn into an iterator
Enumeration periodEnum=periodVector.elements();
int nodeIndex = 0;
while(periodEnum.hasMoreElements()) {
nanoxml.XMLElement period=(nanoxml.XMLElement)periodEnum.nextElement();
int periodYr = period.getIntAttribute("value");
ls.addPeriod(new Period(periodYr));
//Iterate through locations (nations)
Vector locVector=period.getChildren();
// turn into an iterator
Enumeration locEnum=locVector.elements();
while(locEnum.hasMoreElements()) {
nanoxml.XMLElement location=(nanoxml.XMLElement)locEnum.nextElement();
double value = location.getDoubleAttribute("v");
String agent = location.getStringAttribute("a");
String recipient = location.getStringAttribute("r");
// Project the source coordinates into screen space: x shifted and
// scaled 3x, y flipped and scaled 4x.
double start_x = ((location.getDoubleAttribute("x")+140)*3);
double start_y = (location.getDoubleAttribute("y")*-4+340);
double end_x = ((location.getDoubleAttribute("x2")+140)*3);
double end_y = (location.getDoubleAttribute("y2")*-4+340);
ls.addLabel((float)start_x,(float)start_y,agent);
ls.addLabel((float)end_x,(float)end_y,recipient);
ls.addLocation(nodeIndex,new Location(agent,recipient,start_x,start_y,end_x,end_y,value));
}
nodeIndex = nodeIndex + 1;
}
// Second document: per-year totals used for the timeline circles.
xmlt = new nanoxml.XMLElement();
String xmlStringt="";
String[] linest;
if (online) {
linest=loadStrings(param("flowsums"));
} else {
linest=loadStrings("data/full-sums.xml");
}
for(int i=0; i<linest.length; i++) xmlStringt+=linest[i];
// NOTE(review): the sums document is parsed into `xml` (overwriting the
// flow document consumed above) rather than into `xmlt`, which stays
// empty. The code below only reads `xml`, so it works, but it looks
// unintentional — confirm whether `xmlt` was meant here.
xml.parseString(xmlStringt);
// retrieve all childnodes and store them in an array
nodes = xml.getChildren().toArray();
//Iterate through periods
Vector timeseriesVector=xml.getChildrenForPath("timeseries/period");
// turn into an iterator
Enumeration timeseriesEnum=timeseriesVector.elements();
nodeIndex = 0;
while(timeseriesEnum.hasMoreElements()) {
nanoxml.XMLElement period=(nanoxml.XMLElement)timeseriesEnum.nextElement();
int periodYr = period.getIntAttribute("value");
double value = period.getDoubleAttribute("sum");
ls.addTimelineCircle(new TimelineCircle(periodYr,value));
nodeIndex = nodeIndex + 1;
}
//JS.callJS("$('spinner').hide();$('armsflow_container').show();", this);
// Tell the hosting page the applet is ready to be shown.
JSObject.getWindow(this).eval( "$('spinner').hide();$('armsflow_container').show();" );
}
/**
 * Per-frame render: repaints the map background, the title and year axis,
 * the animated flow arcs for the displayed period, and the timeline circles.
 */
public void draw(){
background(0xffeeeeee);
tint(0xffeeeeee);
strokeCap(SQUARE);
image(b, -70, 20, 1100, 540);
// Restore the body font every frame: Label.render() switches the sketch
// to Georgia-16, so this reset is load-bearing. (Re-creating the PFont
// each frame is wasteful — caching a second PFont field would avoid it.)
font = createFont("LucidaGrande-10.vlw",10);
textFont(font, 10);
//text(frame+1950,10,20);
fill(0xff888888,160);
if (online) {
text("Exports, "+param("year"),9,18);
} else {
text("Exports",9,18);
}
//x-axis labels
for (int i=1950;i<=2006;i+=5) {
text(i,(16.42f*(i-1950)) + 27,620);
}
//mark where we are
//fill(#cc4444,210);
//ellipse((float)(8*(frame)) + 135,290,7,7);
ls.renderPeriod(0); //each period will show for 10 frames
frame = frame + 1;
// Once the grow-in animation completes, drop to a slower frame rate.
if (frame > (lengthAnim-1)) {
//frame = 0;
frameRate(12);
}
}
/**
 * Forwards the click to the map system, then — if the click landed inside
 * the timeline strip at the bottom of the canvas — opens the year page for
 * the clicked position.
 */
public void mousePressed() {
  ls.click();
  boolean insideTimelineRows = (mouseY > 560) && (640 > mouseY);
  boolean insideTimelineCols = (mouseX > 23) && (977 > mouseX);
  if (insideTimelineRows && insideTimelineCols) {
    int clickedYear = (int)(mouseX/16.42f + 1948);
    link(param("urlbase")+"/flow/year/"+clickedYear);
  }
}
/**
 * One circle on the bottom timeline: its (log-scaled) diameter encodes the
 * total flow value for a single year, and hovering it shows the year label.
 */
class TimelineCircle {
  double scaleVal = 1000; // divisor applied to value before log scaling
  double value, yearPos;
  int periodYr;

  TimelineCircle (int _periodYr,double _value) {
    periodYr = _periodYr;
    value = _value;
  }

  /** Draws the circle at its year position, highlighted when hovered. */
  public void render() {
    yearPos = (16.42f*(periodYr-1950)) + 40;
    fill(0xff888888,80);
    noStroke();
    boolean overTimelineRow = (mouseY > 560) && (640 > mouseY);
    boolean overThisCircle = (yearPos+8 > mouseX) && (mouseX > yearPos-8);
    if (overTimelineRow && overThisCircle) {
      // Hovered: show the year label above the circle and brighten it.
      rectMode(CENTER);
      fill(0xff888888,160);
      text((int)periodYr,(int)yearPos-13,570);
      fill(0xffcc4444,250);
    } else {
      fill(0xffcc4444,210);
    }
    // Log-scaled diameter keeps large sums on screen.
    double diameter = log((float)(value/scaleVal)+1)*20;
    ellipse((float)yearPos,590,(float)diameter,(float)diameter);
    fill(0xff333333,120);
  }
}
/**
 * A single directed flow between two map points for one period, rendered as
 * a quarter-ellipse arc whose stroke weight encodes the (log-scaled) value.
 */
class Location {
double start_x, start_y, end_x, end_y, loc_y, value;
String agent,recipient;
double scaleVal = 1000; // divisor applied to value before log scaling
Location (String _agent, String _recipient, double _start_x, double _start_y, double _end_x, double _end_y, double _value) {
value = _value;
agent = _agent;
recipient = _recipient;
// Points near the left edge are shifted 180 px right — presumably to
// wrap far-west coordinates back onto the map; TODO confirm intent.
if (_start_x < 10) {
_start_x = _start_x + 180;
}
if (_end_x < 10) {
_end_x = _end_x + 180;
}
start_x = _start_x;
start_y = _start_y;
end_x = _end_x;
end_y = _end_y;
}
public void render() {
fill(0xffcc4444,210);
stroke(0xffcc4444,210);
// Stroke weight grows with the log of the value so large flows don't
// dominate the map.
double _diameter = log((float)(value/scaleVal)+1)*20;
strokeWeight((float)_diameter+1);
noFill();
float startArc,endArc;
double temp;
// The arc is one quarter of an ellipse whose half-spans are the x/y
// deltas between the endpoints; (arc_x, arc_y) is the ellipse centre.
float arc_x = (float)start_x,arc_y = (float)end_y,arc_w,arc_h;
arc_w = (float)abs((int)(end_x-start_x))*2;
arc_h = (float)abs((int)(end_y-start_y))*2;
// Clock-face angles in Processing convention: a_0 = 12 o'clock,
// a_3 = 3 o'clock, a_6 = 6 o'clock, etc.
float a_0 = PI/-2, a_3 = 0, a_6 = PI/2, a_9 = PI, a_12 = TWO_PI-PI/2,a_15 = TWO_PI;
// Arc angles
// Choose the ellipse quarter (and, for NE, the centre) from the compass
// direction of travel between start and end points.
if (end_x>start_x) { // Going E
if (end_y>start_y) { // Going SE
startArc = a_0;
endArc = a_3;
} else { // Going NE //CC
startArc = a_9;
endArc = a_12;
arc_x = (float)end_x;
arc_y = (float)start_y;
}
} else { // Going W
if (end_y>start_y) { // Going SW //CC
startArc = a_9;
endArc = a_12;
} else { // Going NW
startArc = a_6;
endArc = a_9;
}
}
//animate:
// During the grow-in animation, sweep the arc's moving end in proportion
// to frame/lengthAnim. The SW case shrinks from the start angle instead,
// so the arc still appears to emanate from the agent's end.
if (frame <= lengthAnim) {
if (end_x>start_x) { // Going E
if (end_y>start_y) { // Going SE
endArc = (startArc+(PI/2)*((float)frame/(float)lengthAnim));
} else { // Going NE //CC
endArc = (startArc+(PI/2)*((float)frame/(float)lengthAnim));
}
} else { // Going W
if (end_y>start_y) { // Going SW //CC
startArc = (endArc-(PI/2)*((float)frame/(float)lengthAnim));
} else { // Going NW
endArc = (startArc+(PI/2)*((float)frame/(float)lengthAnim));
}
}
/* //east or west?
if (end_x>start_x) { // Going E
endArc = (startArc+(PI/2)*((float)frame/(float)lengthAnim));
} else { // Going W
startArc = (endArc-(PI/2)*((float)frame/(float)lengthAnim));
}
*/ }
arc(arc_x,arc_y,arc_w,arc_h,startArc,endArc);
//line((float)start_x,(float)start_y,(float)end_x,(float)end_y);
/*noStroke();
fill(#333333,120);
int label = (int) value;
//text(agent,(float)(loc_x-6+_diameter/2),(float)(loc_y+6));
if ((mouseY > end_y-20) && (end_y+20 > mouseY)) {
if ((end_x+20 > mouseX) && (mouseX > end_x-20)) {
fill(#888888,100);
ellipse((float)end_x,(float)end_y,(float)_diameter*10,(float)_diameter*10);
fill(#444444,200);
text(recipient,(float)end_x,(float)end_y);
fill(#cc4444,250);
} else {
fill(#cc4444,210);
}
}*/
}
}
/**
 * All flow arcs (Location instances) belonging to a single year.
 */
class Period {
  int periodYr;
  ArrayList locations; // Location instances for this year

  Period (int _periodYr) {
    locations = new ArrayList();
    periodYr = _periodYr;
  }

  /** Registers one more flow arc for this year. */
  public void addLocation(Location _l) {
    locations.add(_l);
  }

  /** Draws every arc, last-added first (preserves the original draw order). */
  public void render() {
    for (int idx = locations.size()-1; idx >= 0; idx--) {
      ((Location) locations.get(idx)).render();
    }
  }
}
/**
 * A hover tooltip anchored at a map point: draws the country name in a
 * callout box when the mouse is within 10 px of the anchor.
 */
class Label {
double x, y;
// NOTE(review): `yearPos` holds the "year" applet parameter as a String —
// the name suggests a coordinate but it is a year value; consider renaming.
String label,yearPos;
Label (double _x, double _y, String _label) {
label = _label;
x = _x;
y = _y;
if (online) {
yearPos = param("year");
} else {
yearPos = "2006";
}
}
// Accessor for the label text (used for de-duplication in MapSystem).
public String label() {
return label;
}
// True when the mouse is inside a 10 px box around the anchor point.
public boolean prox() {
if ((mouseX+10 > x)&&(mouseX-10 < x)) {
if ((mouseY+10 > y)&&(mouseY-10 < y)) {
return true;
} else {
return false;
}
} else {
return false;
}
}
public void render() {
// Loads the tooltip font on every call and leaves the sketch font
// changed until draw() resets it; caching this PFont would avoid the
// repeated load.
font = loadFont("Georgia-16.vlw");
textFont(font, 16);
float textwidth = textWidth(label)+10;
// Drop shadow (offset 2 px), then the callout box and its pointer.
fill(0xff111111,40);
rect((float)x-textwidth/2+2,(float)y-36+2,textwidth,26);
triangle((float)x+2,(float)y,(float)x-8+2,(float)y-8,(float)x+8+2,(float)y-8);
fill(0xffd3dde7);
rect((float)(int)(x-textwidth/2),(float)(int)(y-36),textwidth,26);
triangle((float)x,(float)y,(float)x-8,(float)y-12,(float)x+8,(float)y-12);
fill(0xff333333);
textAlign(CENTER);
text(label,(float)x,(float)y-17);
noStroke();
textAlign(LEFT);
}
}
/**
 * Registry and renderer for all per-year data: flow periods, timeline
 * circles, and hover labels (kept unique by label text).
 */
class MapSystem {
  ArrayList periods,timelineCircles,labels;

  MapSystem () {
    periods = new ArrayList();
    timelineCircles = new ArrayList();
    labels = new ArrayList();
  }

  /** Adds one year's worth of flow arcs. */
  public void addPeriod(Period _p) {
    periods.add(_p);
  }

  /** Adds one circle to the bottom timeline. */
  public void addTimelineCircle(TimelineCircle _p) {
    timelineCircles.add(_p);
  }

  /** Registers a hover label unless one with the same text already exists. */
  public void addLabel(float _x, float _y,String _label) {
    boolean alreadyKnown = false;
    for (int idx = labels.size()-1; idx >= 0 && !alreadyKnown; idx--) {
      if (((Label) labels.get(idx)).label().equals(_label)) {
        alreadyKnown = true;
      }
    }
    if (!alreadyKnown) {
      labels.add(new Label(_x,_y,_label));
    }
  }

  /** Attaches a flow arc to the period stored at the given index. */
  public void addLocation(int _p,Location _l) {
    ((Period) periods.get(_p)).addLocation(_l);
  }

  /** Clears the canvas and draws the arcs of every period. */
  public void render() {
    background(0xffeeeeee);
    for (int idx = periods.size()-1; idx >= 0; idx--) {
      ((Period) periods.get(idx)).render();
    }
  }

  /** Draws one period's arcs, the timeline circles, and any hovered labels. */
  public void renderPeriod(int _p) {
    ((Period) periods.get(_p)).render();
    for (int idx = timelineCircles.size()-1; idx >= 0; idx--) {
      ((TimelineCircle) timelineCircles.get(idx)).render();
    }
    // Only labels near the mouse cursor are drawn.
    for (int idx = labels.size()-1; idx >= 0; idx--) {
      Label candidate = (Label) labels.get(idx);
      if (candidate.prox()) {
        candidate.render();
      }
    }
  }

  /** Opens the country page for every label currently under the cursor. */
  public void click() {
    for (int idx = labels.size()-1; idx >= 0; idx--) {
      Label candidate = (Label) labels.get(idx);
      if (candidate.prox()) {
        link(param("urlbase")+"/flow/country/"+candidate.label+"."+param("year"));
      }
    }
  }

  /** Number of registered periods. */
  public int periodCount() {
    return periods.size();
  }
}
static public void main(String args[]) { PApplet.main(new String[] { "arcflow_home" }); }}
| |
/*
* Copyright 2013 University of Chicago and Argonne National Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package exm.stc.ic.opt;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.log4j.Logger;
import exm.stc.common.Settings;
import exm.stc.common.lang.Arg;
import exm.stc.common.lang.ExecContext;
import exm.stc.common.lang.Location;
import exm.stc.common.lang.Types;
import exm.stc.common.lang.Types.StructType;
import exm.stc.common.lang.Types.StructType.StructField;
import exm.stc.common.lang.Types.Type;
import exm.stc.common.lang.Var;
import exm.stc.ic.opt.OptimizerPass.FunctionOptimizerPass;
import exm.stc.ic.opt.TreeWalk.TreeWalker;
import exm.stc.ic.tree.ICContinuations.Continuation;
import exm.stc.ic.tree.ICContinuations.ContinuationType;
import exm.stc.ic.tree.ICContinuations.NestedBlock;
import exm.stc.ic.tree.ICContinuations.WaitStatement;
import exm.stc.ic.tree.ICInstructions.Instruction;
import exm.stc.ic.tree.ICTree.Block;
import exm.stc.ic.tree.ICTree.Function;
/**
* Compile-time pipelining optimization where we merge sequentially dependent
* tasks. This reduces scheduling/task dispatch/load balancing overhead, and
* also eliminates the need to move intermediate data.
*
* This is a pass that should be run once near end of optimization.
*
* Running it multiple times can result in reduction in parallelism
*/
public class Pipeline extends FunctionOptimizerPass {
  @Override
  public String getPassName() {
    return "Compile time pipelining";
  }

  @Override
  public String getConfigEnabledKey() {
    return Settings.OPT_PIPELINE;
  }

  @Override
  public void optimize(Logger logger, Function f) {
    // A synchronous function body may already be executing inside a loop of
    // its caller's task; an async function always starts in a fresh task.
    // (Was `f.isAsync() ? false : true` — same value, clearer form.)
    boolean maybeInLoop = !f.isAsync();
    pipelineTasks(logger, f, f.mainBlock(), ExecContext.control(), maybeInLoop);
  }

  /**
   * Recursively pipeline tasks, bottom-up: first descend into all child
   * blocks, then try to merge one eligible child wait statement into this
   * block's task.
   *
   * @param logger for trace output
   * @param f enclosing function
   * @param curr block currently being processed
   * @param cx current exec context
   * @param maybeInLoop if there's maybe a loop between the current
   *                    context and the root of the task we're in
   */
  private static void pipelineTasks(Logger logger, Function f, Block curr,
      ExecContext cx, boolean maybeInLoop) {
    // Do a bottom-up tree walk
    for (Continuation cont: curr.allComplexStatements()) {
      boolean contInLoop;
      if (cont.isAsync()) {
        // New task: loops above us no longer apply inside it
        contInLoop = false;
      } else if (cont.isLoop()) {
        // Sync loop within the same task
        contInLoop = true;
      } else {
        contInLoop = maybeInLoop;
      }
      ExecContext childCx = cont.childContext(cx);
      for (Block childBlock: cont.getBlocks()) {
        pipelineTasks(logger, f, childBlock, childCx, contInLoop);
      }
    }
    if (maybeInLoop) {
      // Don't try to optimize, might serialise things
      return;
    }
    // Find candidates for merging: wait statements which are not
    // blocked on anything and which execute in same context as this
    // block.
    List<WaitStatement> candidates = new ArrayList<WaitStatement>();
    for (Continuation cont: curr.getContinuations()) {
      if (cont.getType() == ContinuationType.WAIT_STATEMENT) {
        WaitStatement w = (WaitStatement)cont;
        ExecContext waitChildContext = w.childContext(cx);
        boolean compatible = true;
        if (!w.getWaitVars().isEmpty()) {
          // Can't merge if we have to wait before execution
          compatible = false;
        } else if (!waitChildContext.equals(cx) &&
                   !waitChildContext.isWildcardContext()) {
          // Can't merge different contexts
          compatible = false;
        } else if (w.isParallel()) {
          compatible = false;
        } else if (!Location.isAnyLocation(w.targetLocation().rank, true)) {
          // TODO: this is overly conservative, could check location of
          // this block
          compatible = false;
        }
        if (compatible) {
          candidates.add(w);
        }
      }
    }
    if (candidates.isEmpty()) {
      // Nothing to merge up
      return;
    }
    // Guard the trace message so the string concatenation is skipped when
    // trace logging is disabled.
    if (logger.isTraceEnabled()) {
      logger.trace("Found " + candidates.size() + " candidates for " +
          " wait pipelining");
    }
    // Pick the candidate whose input variables are cheapest to pass in.
    WaitStatement bestCand = candidates.get(0);
    if (candidates.size() > 1) {
      int bestCost = heuristicCost(logger, f, curr, bestCand);
      for (int i = 1; i < candidates.size(); i++) {
        WaitStatement cand = candidates.get(i);
        int cost = heuristicCost(logger, f, curr, cand);
        if (cost < bestCost) {
          bestCost = cost;
          bestCand = cand;
        }
      }
    }
    if (candidates.size() == 1) {
      bestCand.inlineInto(curr);
    } else {
      // Need to make sure local code runs after tasks are spawned
      NestedBlock nested = new NestedBlock();
      bestCand.inlineInto(nested.getBlock());
      nested.setRunLast(true);
      curr.addContinuation(nested);
    }
  }

  /**
   * Estimates the cost of merging the candidate wait into the current task:
   * the total passing cost of every variable its body reads that is declared
   * outside it (i.e. would have to be passed in).
   */
  private static int heuristicCost(Logger logger, Function f,
      Block curr, WaitStatement cand) {
    final Set<Var> varsReadByChildTask = new HashSet<Var>();
    final Set<Var> varsDeclaredWithinChildTask = new HashSet<Var>();
    TreeWalker walker = new TreeWalker() {
      @Override
      protected void visit(Instruction inst) {
        for (Arg in: inst.getInputs()) {
          if (in.isVar()) {
            varsReadByChildTask.add(in.getVar());
          }
        }
      }
      @Override
      protected void visit(Block block) {
        varsDeclaredWithinChildTask.addAll(block.variables());
      }};
    // Find variables used in child task
    TreeWalk.walkSyncChildren(logger, f, cand.getBlock(), true, walker);
    // Only count variables that were passed in
    varsReadByChildTask.removeAll(varsDeclaredWithinChildTask);
    int cost = 0;
    for (Var passed: varsReadByChildTask) {
      cost += costOfPassing(logger, passed.type());
    }
    return cost;
  }

  /**
   * Heuristic score for passing one variable of the given type into a task
   * (baseline plain future = 1).
   *
   * TODO: this is simplistic, since this doesn't incorporate whether the
   * variable is produced or consumed by the child or the exact mechanism
   * of data transfer
   * @param logger used to warn about unhandled types
   * @param t type of the variable being passed
   * @return relative passing cost
   */
  private static int costOfPassing(Logger logger, Type t) {
    if (Types.isFile(t)) {
      // Files tend to be large
      return 20;
    } else if (Types.isBlob(t)) {
      // Blobs also tend to be fairly large
      return 5;
    } else if (Types.isPrimFuture(t) || Types.isRef(t)) {
      // Baseline cost is plain future: 1
      return 1;
    } else if (Types.isPrimValue(t)) {
      return 0;
    } else if (Types.isContainer(t)) {
      return 1;
    } else if (Types.isStruct(t)) {
      // A struct costs the sum of its fields (recursively)
      StructType st = (StructType)t.getImplType();
      int totalCost = 0;
      for (StructField sf: st.fields()) {
        totalCost += costOfPassing(logger, sf.type());
      }
      return totalCost;
    } else {
      logger.warn("Don't know how to calculate passing cost for type: " + t);
      return 1;
    }
  }
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.trans.steps.salesforceupsert;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.custom.CTabFolder;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.events.FocusListener;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.graphics.Cursor;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Props;
import org.pentaho.di.core.SourceToTargetMapping;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.steps.salesforceinput.SalesforceConnection;
import org.pentaho.di.trans.steps.salesforceinput.SalesforceConnectionUtils;
import org.pentaho.di.trans.steps.salesforceupsert.SalesforceUpsertMeta;
import org.pentaho.di.ui.core.database.dialog.DatabaseDialog;
import org.pentaho.di.ui.core.dialog.EnterMappingDialog;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.core.gui.GUIResource;
import org.pentaho.di.ui.core.widget.ColumnInfo;
import org.pentaho.di.ui.core.widget.ComboVar;
import org.pentaho.di.ui.core.widget.LabelTextVar;
import org.pentaho.di.ui.core.widget.TableView;
import org.pentaho.di.ui.core.widget.TextVar;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
import org.pentaho.di.ui.trans.step.TableItemInsertListener;
public class SalesforceUpsertDialog extends BaseStepDialog implements StepDialogInterface {
private static Class<?> PKG = SalesforceUpsertMeta.class; // for i18n purposes, needed by Translator2!!
// --- Tab folder and "General" tab containers ---
private CTabFolder wTabFolder;
private FormData fdTabFolder;
private CTabItem wGeneralTab;
private Composite wGeneralComp;
private FormData fdGeneralComp;
// --- Module / upsert-field / batch-size controls and their layouts ---
private FormData fdlModule, fdModule;
private FormData fdlUpsertField, fdUpsertField;
private FormData fdlBatchSize, fdBatchSize;
private FormData fdUserName, fdURL, fdPassword;
private Label wlModule, wlBatchSize;
private Label wlUpsertField;
// Incoming stream field name -> field index, used for mapping
private Map<String, Integer> inputFields;
private ColumnInfo[] ciReturn;
// --- Field-mapping table and its buttons ---
private Button wDoMapping;
private FormData fdDoMapping;
private Label wlReturn;
private TableView wReturn;
private FormData fdlReturn, fdReturn;
private Button wGetLU;
private FormData fdGetLU;
private Listener lsGetLU;
// Step metadata being edited by this dialog
private SalesforceUpsertMeta input;
// --- Connection / settings widgets ---
private LabelTextVar wUserName, wURL, wPassword;
private TextVar wBatchSize;
private ComboVar wModule;
private CCombo wUpsertField;
private Button wTest;
private FormData fdTest;
private Listener lsTest;
private Group wConnectionGroup;
private FormData fdConnectionGroup;
private Group wSettingsGroup, wOutFieldsGroup;
private FormData fdSettingsGroup, fdOutFieldsGroup;
private Label wlSalesforceIDFieldName;
private FormData fdlSalesforceIDFieldName;
private TextVar wSalesforceIDFieldName;
private FormData fdSalesforceIDFieldName;
// Lazily-populated caches and their error flags
private boolean gotModule = false;
private boolean getModulesListError = false; /* True if error getting modules list */
private Label wlUseCompression;
private FormData fdlUseCompression;
private Button wUseCompression;
private FormData fdUseCompression;
private Label wlTimeOut;
private FormData fdlTimeOut;
private TextVar wTimeOut;
private FormData fdTimeOut;
private Label wlRollbackAllChangesOnError;
private FormData fdlRollbackAllChangesOnError;
private Button wRollbackAllChangesOnError;
private FormData fdRollbackAllChangesOnError;
/**
 * List of ColumnInfo that should have the field names of the selected database table
 */
private static List<ColumnInfo> tableFieldColumns = new ArrayList<ColumnInfo>();
private boolean gotFields = false;
/**
 * Creates the dialog for a Salesforce Upsert step.
 *
 * @param parent    parent shell
 * @param in        step metadata to edit (must be a SalesforceUpsertMeta)
 * @param transMeta owning transformation
 * @param sname     step name
 */
public SalesforceUpsertDialog( Shell parent, Object in, TransMeta transMeta, String sname ) {
super( parent, (BaseStepMeta) in, transMeta, sname );
input = (SalesforceUpsertMeta) in;
inputFields = new HashMap<String, Integer>();
}
/**
 * Builds and lays out the step dialog (connection group, settings group, output-fields group
 * and the update/insert mapping table), wires up all listeners, loads the current metadata
 * into the widgets and runs the SWT event loop until the dialog is closed.
 *
 * @return the step name, or null if the user cancelled (see cancel())
 */
public String open() {
Shell parent = getParent();
Display display = parent.getDisplay();
shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN );
props.setLook( shell );
setShellImage( shell, input );
// Generic "something changed" listener for simple text widgets.
ModifyListener lsMod = new ModifyListener() {
public void modifyText( ModifyEvent e ) {
input.setChanged();
}
};
// Module changes also refresh the field combos of the mapping table.
ModifyListener lsTableMod = new ModifyListener() {
public void modifyText( ModifyEvent arg0 ) {
input.setChanged();
setModuleFieldCombo();
}
};
SelectionAdapter lsSelection = new SelectionAdapter() {
public void widgetSelected( SelectionEvent e ) {
input.setChanged();
setModuleFieldCombo();
}
};
changed = input.hasChanged();
FormLayout formLayout = new FormLayout();
formLayout.marginWidth = Const.FORM_MARGIN;
formLayout.marginHeight = Const.FORM_MARGIN;
shell.setLayout( formLayout );
shell.setText( BaseMessages.getString( PKG, "SalesforceUpsertDialog.DialogTitle" ) );
int middle = props.getMiddlePct();
int margin = Const.MARGIN;
// Stepname line
wlStepname = new Label( shell, SWT.RIGHT );
wlStepname.setText( BaseMessages.getString( PKG, "System.Label.StepName" ) );
props.setLook( wlStepname );
fdlStepname = new FormData();
fdlStepname.left = new FormAttachment( 0, 0 );
fdlStepname.top = new FormAttachment( 0, margin );
fdlStepname.right = new FormAttachment( middle, -margin );
wlStepname.setLayoutData( fdlStepname );
wStepname = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
wStepname.setText( stepname );
props.setLook( wStepname );
wStepname.addModifyListener( lsMod );
fdStepname = new FormData();
fdStepname.left = new FormAttachment( middle, 0 );
fdStepname.top = new FormAttachment( 0, margin );
fdStepname.right = new FormAttachment( 100, 0 );
wStepname.setLayoutData( fdStepname );
wTabFolder = new CTabFolder( shell, SWT.BORDER );
props.setLook( wTabFolder, Props.WIDGET_STYLE_TAB );
// ////////////////////////
// START OF FILE TAB ///
// ////////////////////////
wGeneralTab = new CTabItem( wTabFolder, SWT.NONE );
wGeneralTab.setText( BaseMessages.getString( PKG, "SalesforceUpsertDialog.General.Tab" ) );
wGeneralComp = new Composite( wTabFolder, SWT.NONE );
props.setLook( wGeneralComp );
FormLayout generalLayout = new FormLayout();
generalLayout.marginWidth = 3;
generalLayout.marginHeight = 3;
wGeneralComp.setLayout( generalLayout );
// ///////////////////////////////
// START OF Connection GROUP //
// ///////////////////////////////
wConnectionGroup = new Group( wGeneralComp, SWT.SHADOW_NONE );
props.setLook( wConnectionGroup );
wConnectionGroup.setText( BaseMessages.getString( PKG, "SalesforceUpsertDialog.ConnectionGroup.Label" ) );
FormLayout connectionGroupLayout = new FormLayout();
connectionGroupLayout.marginWidth = 10;
connectionGroupLayout.marginHeight = 10;
wConnectionGroup.setLayout( connectionGroupLayout );
// Webservice URL
wURL = new LabelTextVar( transMeta, wConnectionGroup,
BaseMessages.getString( PKG, "SalesforceUpsertDialog.URL.Label" ),
BaseMessages.getString( PKG, "SalesforceUpsertDialog.URL.Tooltip" ) );
props.setLook( wURL );
wURL.addModifyListener( lsMod );
fdURL = new FormData();
fdURL.left = new FormAttachment( 0, 0 );
fdURL.top = new FormAttachment( wStepname, margin );
fdURL.right = new FormAttachment( 100, 0 );
wURL.setLayoutData( fdURL );
// UserName line
wUserName = new LabelTextVar( transMeta, wConnectionGroup,
BaseMessages.getString( PKG, "SalesforceUpsertDialog.User.Label" ),
BaseMessages.getString( PKG, "SalesforceUpsertDialog.User.Tooltip" ) );
props.setLook( wUserName );
wUserName.addModifyListener( lsMod );
fdUserName = new FormData();
fdUserName.left = new FormAttachment( 0, 0 );
fdUserName.top = new FormAttachment( wURL, margin );
fdUserName.right = new FormAttachment( 100, 0 );
wUserName.setLayoutData( fdUserName );
// Password line
wPassword = new LabelTextVar( transMeta, wConnectionGroup,
BaseMessages.getString( PKG, "SalesforceUpsertDialog.Password.Label" ),
BaseMessages.getString( PKG, "SalesforceUpsertDialog.Password.Tooltip" ) );
props.setLook( wPassword );
wPassword.setEchoChar( '*' );
wPassword.addModifyListener( lsMod );
fdPassword = new FormData();
fdPassword.left = new FormAttachment( 0, 0 );
fdPassword.top = new FormAttachment( wUserName, margin );
fdPassword.right = new FormAttachment( 100, 0 );
wPassword.setLayoutData( fdPassword );
// Show the password in clear text while the user types a variable reference.
wPassword.getTextWidget().addModifyListener( new ModifyListener() {
public void modifyText( ModifyEvent e ) {
DatabaseDialog.checkPasswordVisible( wPassword.getTextWidget() );
}
} );
// Test Salesforce connection button
wTest = new Button( wConnectionGroup, SWT.PUSH );
wTest.setText( BaseMessages.getString( PKG, "SalesforceUpsertDialog.TestConnection.Label" ) );
props.setLook( wTest );
fdTest = new FormData();
wTest.setToolTipText( BaseMessages.getString( PKG, "SalesforceUpsertDialog.TestConnection.Tooltip" ) );
// fdTest.left = new FormAttachment(middle, 0);
fdTest.top = new FormAttachment( wPassword, margin );
fdTest.right = new FormAttachment( 100, 0 );
wTest.setLayoutData( fdTest );
fdConnectionGroup = new FormData();
fdConnectionGroup.left = new FormAttachment( 0, margin );
fdConnectionGroup.top = new FormAttachment( wStepname, margin );
fdConnectionGroup.right = new FormAttachment( 100, -margin );
wConnectionGroup.setLayoutData( fdConnectionGroup );
// ///////////////////////////////
// END OF Connection GROUP //
// ///////////////////////////////
// ///////////////////////////////
// START OF Settings GROUP //
// ///////////////////////////////
wSettingsGroup = new Group( wGeneralComp, SWT.SHADOW_NONE );
props.setLook( wSettingsGroup );
wSettingsGroup.setText( BaseMessages.getString( PKG, "SalesforceUpsertDialog.SettingsGroup.Label" ) );
FormLayout settingGroupLayout = new FormLayout();
settingGroupLayout.marginWidth = 10;
settingGroupLayout.marginHeight = 10;
wSettingsGroup.setLayout( settingGroupLayout );
// Timeout
wlTimeOut = new Label( wSettingsGroup, SWT.RIGHT );
wlTimeOut.setText( BaseMessages.getString( PKG, "SalesforceUpsertDialog.TimeOut.Label" ) );
props.setLook( wlTimeOut );
fdlTimeOut = new FormData();
fdlTimeOut.left = new FormAttachment( 0, 0 );
fdlTimeOut.top = new FormAttachment( wSettingsGroup, margin );
fdlTimeOut.right = new FormAttachment( middle, -margin );
wlTimeOut.setLayoutData( fdlTimeOut );
wTimeOut = new TextVar( transMeta, wSettingsGroup, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
props.setLook( wTimeOut );
wTimeOut.addModifyListener( lsMod );
fdTimeOut = new FormData();
fdTimeOut.left = new FormAttachment( middle, 0 );
fdTimeOut.top = new FormAttachment( wSettingsGroup, margin );
fdTimeOut.right = new FormAttachment( 100, 0 );
wTimeOut.setLayoutData( fdTimeOut );
// Use compression?
wlUseCompression = new Label( wSettingsGroup, SWT.RIGHT );
wlUseCompression.setText( BaseMessages.getString( PKG, "SalesforceUpsertDialog.UseCompression.Label" ) );
props.setLook( wlUseCompression );
fdlUseCompression = new FormData();
fdlUseCompression.left = new FormAttachment( 0, 0 );
fdlUseCompression.top = new FormAttachment( wTimeOut, margin );
fdlUseCompression.right = new FormAttachment( middle, -margin );
wlUseCompression.setLayoutData( fdlUseCompression );
wUseCompression = new Button( wSettingsGroup, SWT.CHECK );
props.setLook( wUseCompression );
wUseCompression
.setToolTipText( BaseMessages.getString( PKG, "SalesforceUpsertDialog.UseCompression.Tooltip" ) );
fdUseCompression = new FormData();
fdUseCompression.left = new FormAttachment( middle, 0 );
fdUseCompression.top = new FormAttachment( wTimeOut, margin );
wUseCompression.setLayoutData( fdUseCompression );
// Rollback all changes on error?
wlRollbackAllChangesOnError = new Label( wSettingsGroup, SWT.RIGHT );
wlRollbackAllChangesOnError.setText( BaseMessages.getString(
PKG, "SalesforceUpsertDialog.RollbackAllChangesOnError.Label" ) );
props.setLook( wlRollbackAllChangesOnError );
fdlRollbackAllChangesOnError = new FormData();
fdlRollbackAllChangesOnError.left = new FormAttachment( 0, 0 );
fdlRollbackAllChangesOnError.top = new FormAttachment( wUseCompression, margin );
fdlRollbackAllChangesOnError.right = new FormAttachment( middle, -margin );
wlRollbackAllChangesOnError.setLayoutData( fdlRollbackAllChangesOnError );
wRollbackAllChangesOnError = new Button( wSettingsGroup, SWT.CHECK );
props.setLook( wRollbackAllChangesOnError );
wRollbackAllChangesOnError.setToolTipText( BaseMessages.getString(
PKG, "SalesforceUpsertDialog.RollbackAllChangesOnError.Tooltip" ) );
fdRollbackAllChangesOnError = new FormData();
fdRollbackAllChangesOnError.left = new FormAttachment( middle, 0 );
fdRollbackAllChangesOnError.top = new FormAttachment( wUseCompression, margin );
wRollbackAllChangesOnError.setLayoutData( fdRollbackAllChangesOnError );
// BatchSize value
wlBatchSize = new Label( wSettingsGroup, SWT.RIGHT );
wlBatchSize.setText( BaseMessages.getString( PKG, "SalesforceUpsertDialog.Limit.Label" ) );
props.setLook( wlBatchSize );
fdlBatchSize = new FormData();
fdlBatchSize.left = new FormAttachment( 0, 0 );
fdlBatchSize.top = new FormAttachment( wRollbackAllChangesOnError, margin );
fdlBatchSize.right = new FormAttachment( middle, -margin );
wlBatchSize.setLayoutData( fdlBatchSize );
wBatchSize = new TextVar( transMeta, wSettingsGroup, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
props.setLook( wBatchSize );
wBatchSize.addModifyListener( lsMod );
fdBatchSize = new FormData();
fdBatchSize.left = new FormAttachment( middle, 0 );
fdBatchSize.top = new FormAttachment( wRollbackAllChangesOnError, margin );
fdBatchSize.right = new FormAttachment( 100, 0 );
wBatchSize.setLayoutData( fdBatchSize );
// Module
wlModule = new Label( wSettingsGroup, SWT.RIGHT );
wlModule.setText( BaseMessages.getString( PKG, "SalesforceUpsertDialog.Module.Label" ) );
props.setLook( wlModule );
fdlModule = new FormData();
fdlModule.left = new FormAttachment( 0, 0 );
fdlModule.top = new FormAttachment( wBatchSize, margin );
fdlModule.right = new FormAttachment( middle, -margin );
wlModule.setLayoutData( fdlModule );
wModule = new ComboVar( transMeta, wSettingsGroup, SWT.SINGLE | SWT.READ_ONLY | SWT.BORDER );
wModule.setEditable( true );
props.setLook( wModule );
wModule.addModifyListener( lsTableMod );
wModule.addSelectionListener( lsSelection );
fdModule = new FormData();
fdModule.left = new FormAttachment( middle, 0 );
fdModule.top = new FormAttachment( wBatchSize, margin );
fdModule.right = new FormAttachment( 100, -margin );
wModule.setLayoutData( fdModule );
// Lazily fetch the module list from Salesforce the first time the combo gains focus.
wModule.addFocusListener( new FocusListener() {
public void focusLost( org.eclipse.swt.events.FocusEvent e ) {
getModulesListError = false;
}
public void focusGained( org.eclipse.swt.events.FocusEvent e ) {
// check if the URL and login credentials passed and not just had error
if ( Const.isEmpty( wURL.getText() )
|| Const.isEmpty( wUserName.getText() ) || Const.isEmpty( wPassword.getText() )
|| ( getModulesListError ) ) {
return;
}
Cursor busy = new Cursor( shell.getDisplay(), SWT.CURSOR_WAIT );
shell.setCursor( busy );
getModulesList();
shell.setCursor( null );
busy.dispose();
}
} );
// Upsert Field
wlUpsertField = new Label( wSettingsGroup, SWT.RIGHT );
wlUpsertField.setText( BaseMessages.getString( PKG, "SalesforceUpsertDialog.Upsert.Label" ) );
props.setLook( wlUpsertField );
fdlUpsertField = new FormData();
fdlUpsertField.left = new FormAttachment( 0, 0 );
fdlUpsertField.top = new FormAttachment( wModule, margin );
fdlUpsertField.right = new FormAttachment( middle, -margin );
wlUpsertField.setLayoutData( fdlUpsertField );
wUpsertField = new CCombo( wSettingsGroup, SWT.SINGLE | SWT.READ_ONLY | SWT.BORDER );
wUpsertField.setEditable( true );
props.setLook( wUpsertField );
wUpsertField.addModifyListener( lsMod );
fdUpsertField = new FormData();
fdUpsertField.left = new FormAttachment( middle, 0 );
fdUpsertField.top = new FormAttachment( wModule, margin );
fdUpsertField.right = new FormAttachment( 100, -margin );
wUpsertField.setLayoutData( fdUpsertField );
// Fetch the module's fields when the upsert-field combo gains focus.
wUpsertField.addFocusListener( new FocusListener() {
public void focusLost( org.eclipse.swt.events.FocusEvent e ) {
}
public void focusGained( org.eclipse.swt.events.FocusEvent e ) {
getFieldsList();
}
} );
fdSettingsGroup = new FormData();
fdSettingsGroup.left = new FormAttachment( 0, margin );
fdSettingsGroup.top = new FormAttachment( wConnectionGroup, margin );
fdSettingsGroup.right = new FormAttachment( 100, -margin );
wSettingsGroup.setLayoutData( fdSettingsGroup );
// ///////////////////////////////
// END OF Settings GROUP //
// ///////////////////////////////
// ///////////////////////////////
// START OF OutFields GROUP //
// ///////////////////////////////
wOutFieldsGroup = new Group( wGeneralComp, SWT.SHADOW_NONE );
props.setLook( wOutFieldsGroup );
wOutFieldsGroup.setText( BaseMessages.getString( PKG, "SalesforceUpsertDialog.OutFieldsGroup.Label" ) );
FormLayout OutFieldsGroupLayout = new FormLayout();
OutFieldsGroupLayout.marginWidth = 10;
OutFieldsGroupLayout.marginHeight = 10;
wOutFieldsGroup.setLayout( OutFieldsGroupLayout );
// SalesforceIDFieldName
wlSalesforceIDFieldName = new Label( wOutFieldsGroup, SWT.RIGHT );
wlSalesforceIDFieldName.setText( BaseMessages.getString(
PKG, "SalesforceUpsertDialog.SalesforceIDFieldName.Label" ) );
props.setLook( wlSalesforceIDFieldName );
fdlSalesforceIDFieldName = new FormData();
fdlSalesforceIDFieldName.left = new FormAttachment( 0, 0 );
fdlSalesforceIDFieldName.top = new FormAttachment( wSettingsGroup, margin );
fdlSalesforceIDFieldName.right = new FormAttachment( middle, -margin );
wlSalesforceIDFieldName.setLayoutData( fdlSalesforceIDFieldName );
wSalesforceIDFieldName = new TextVar( transMeta, wOutFieldsGroup, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
props.setLook( wSalesforceIDFieldName );
wSalesforceIDFieldName.setToolTipText( BaseMessages.getString(
PKG, "SalesforceUpsertDialog.SalesforceIDFieldName.Tooltip" ) );
wSalesforceIDFieldName.addModifyListener( lsMod );
fdSalesforceIDFieldName = new FormData();
fdSalesforceIDFieldName.left = new FormAttachment( middle, 0 );
fdSalesforceIDFieldName.top = new FormAttachment( wSettingsGroup, margin );
fdSalesforceIDFieldName.right = new FormAttachment( 100, 0 );
wSalesforceIDFieldName.setLayoutData( fdSalesforceIDFieldName );
fdOutFieldsGroup = new FormData();
fdOutFieldsGroup.left = new FormAttachment( 0, margin );
fdOutFieldsGroup.top = new FormAttachment( wSettingsGroup, margin );
fdOutFieldsGroup.right = new FormAttachment( 100, -margin );
wOutFieldsGroup.setLayoutData( fdOutFieldsGroup );
// ///////////////////////////////
// END OF OutFields GROUP //
// ///////////////////////////////
// THE UPDATE/INSERT TABLE
wlReturn = new Label( wGeneralComp, SWT.NONE );
wlReturn.setText( BaseMessages.getString( PKG, "SalesforceUpsertDialog.UpdateFields.Label" ) );
props.setLook( wlReturn );
fdlReturn = new FormData();
fdlReturn.left = new FormAttachment( 0, 0 );
fdlReturn.top = new FormAttachment( wOutFieldsGroup, margin );
wlReturn.setLayoutData( fdlReturn );
int UpInsCols = 3;
int UpInsRows = ( input.getUpdateLookup() != null ? input.getUpdateLookup().length : 1 );
ciReturn = new ColumnInfo[UpInsCols];
ciReturn[0] =
new ColumnInfo(
BaseMessages.getString( PKG, "SalesforceUpsertDialog.ColumnInfo.TableField" ),
ColumnInfo.COLUMN_TYPE_CCOMBO, new String[] { "" }, false );
ciReturn[1] =
new ColumnInfo(
BaseMessages.getString( PKG, "SalesforceUpsertDialog.ColumnInfo.StreamField" ),
ColumnInfo.COLUMN_TYPE_CCOMBO, new String[] { "" }, false );
ciReturn[2] =
new ColumnInfo(
BaseMessages.getString( PKG, "SalesforceUpsertDialog.ColumnInfo.UseExternalId" ),
ColumnInfo.COLUMN_TYPE_CCOMBO, new String[] { "Y", "N" } );
// NOTE(review): message key below says "SalesforceUpdateDialog" (not Upsert) — looks like a
// copy-paste from the Update dialog; confirm the key exists before renaming it.
ciReturn[2].setToolTip( BaseMessages
.getString( PKG, "SalesforceUpdateDialog.ColumnInfo.UseExternalId.Tooltip" ) );
// NOTE(review): tableFieldColumns is static, so this add() accumulates one entry per dialog
// opening — possible leak; confirm.
tableFieldColumns.add( ciReturn[0] );
wReturn =
new TableView( transMeta, wGeneralComp, SWT.BORDER
| SWT.FULL_SELECTION | SWT.MULTI | SWT.V_SCROLL | SWT.H_SCROLL, ciReturn, UpInsRows, lsMod, props );
wGetLU = new Button( wGeneralComp, SWT.PUSH );
wGetLU.setText( BaseMessages.getString( PKG, "SalesforceUpsertDialog.GetAndUpdateFields.Label" ) );
fdGetLU = new FormData();
fdGetLU.top = new FormAttachment( wlReturn, margin );
fdGetLU.right = new FormAttachment( 100, 0 );
wGetLU.setLayoutData( fdGetLU );
wDoMapping = new Button( wGeneralComp, SWT.PUSH );
wDoMapping.setText( BaseMessages.getString( PKG, "SalesforceUpsertDialog.EditMapping.Label" ) );
fdDoMapping = new FormData();
fdDoMapping.top = new FormAttachment( wGetLU, margin );
fdDoMapping.right = new FormAttachment( 100, 0 );
wDoMapping.setLayoutData( fdDoMapping );
wDoMapping.addListener( SWT.Selection, new Listener() {
public void handleEvent( Event arg0 ) {
generateMappings();
}
} );
fdReturn = new FormData();
fdReturn.left = new FormAttachment( 0, 0 );
fdReturn.top = new FormAttachment( wlReturn, margin );
fdReturn.right = new FormAttachment( wGetLU, -5 * margin );
fdReturn.bottom = new FormAttachment( 100, -2 * margin );
wReturn.setLayoutData( fdReturn );
//
// Search the fields in the background
//
final Runnable runnable = new Runnable() {
public void run() {
StepMeta stepMeta = transMeta.findStep( stepname );
if ( stepMeta != null ) {
try {
RowMetaInterface row = transMeta.getPrevStepFields( stepMeta );
// Remember these fields...
for ( int i = 0; i < row.size(); i++ ) {
inputFields.put( row.getValueMeta( i ).getName(), Integer.valueOf( i ) );
}
setComboBoxes();
// Dislay in red missing field names
Display.getDefault().asyncExec( new Runnable() {
public void run() {
if ( !wReturn.isDisposed() ) {
for ( int i = 0; i < wReturn.table.getItemCount(); i++ ) {
TableItem it = wReturn.table.getItem( i );
if ( !Const.isEmpty( it.getText( 2 ) ) ) {
if ( !inputFields.containsKey( it.getText( 2 ) ) ) {
it.setBackground( GUIResource.getInstance().getColorRed() );
}
}
}
}
}
} );
} catch ( KettleException e ) {
logError( BaseMessages.getString( PKG, "System.Dialog.GetFieldsFailed.Message" ) );
}
}
}
};
new Thread( runnable ).start();
fdGeneralComp = new FormData();
fdGeneralComp.left = new FormAttachment( 0, 0 );
fdGeneralComp.top = new FormAttachment( wStepname, margin );
fdGeneralComp.right = new FormAttachment( 100, 0 );
fdGeneralComp.bottom = new FormAttachment( 100, 0 );
wGeneralComp.setLayoutData( fdGeneralComp );
wGeneralComp.layout();
wGeneralTab.setControl( wGeneralComp );
// THE BUTTONS
wOK = new Button( shell, SWT.PUSH );
wOK.setText( BaseMessages.getString( PKG, "System.Button.OK" ) );
wCancel = new Button( shell, SWT.PUSH );
wCancel.setText( BaseMessages.getString( PKG, "System.Button.Cancel" ) );
setButtonPositions( new Button[] { wOK, wCancel }, margin, null );
fdTabFolder = new FormData();
fdTabFolder.left = new FormAttachment( 0, 0 );
fdTabFolder.top = new FormAttachment( wStepname, margin );
fdTabFolder.right = new FormAttachment( 100, 0 );
fdTabFolder.bottom = new FormAttachment( wOK, -margin );
wTabFolder.setLayoutData( fdTabFolder );
// Add listeners
lsOK = new Listener() {
public void handleEvent( Event e ) {
ok();
}
};
lsTest = new Listener() {
public void handleEvent( Event e ) {
test();
}
};
lsGetLU = new Listener() {
public void handleEvent( Event e ) {
getUpdate();
}
};
lsCancel = new Listener() {
public void handleEvent( Event e ) {
cancel();
}
};
wOK.addListener( SWT.Selection, lsOK );
wGetLU.addListener( SWT.Selection, lsGetLU );
wTest.addListener( SWT.Selection, lsTest );
wCancel.addListener( SWT.Selection, lsCancel );
lsDef = new SelectionAdapter() {
public void widgetDefaultSelected( SelectionEvent e ) {
ok();
}
};
wStepname.addSelectionListener( lsDef );
// Detect X or ALT-F4 or something that kills this window...
shell.addShellListener( new ShellAdapter() {
public void shellClosed( ShellEvent e ) {
cancel();
}
} );
wTabFolder.setSelection( 0 );
// Set the shell size, based upon previous time...
setSize();
getData( input );
input.setChanged( changed );
shell.open();
// Standard SWT event loop: block until the dialog shell is disposed.
while ( !shell.isDisposed() ) {
if ( !display.readAndDispatch() ) {
display.sleep();
}
}
return stepname;
}
/**
 * Fills the update/insert mapping table with the fields coming from the previous step.
 * Every freshly inserted row defaults its "use external id" column to "Y".
 */
private void getUpdate() {
  try {
    RowMetaInterface previousFields = transMeta.getPrevStepFields( stepname );
    if ( previousFields != null ) {
      // Mark each inserted row as using the external id by default.
      TableItemInsertListener markExternalId = new TableItemInsertListener() {
        public boolean tableItemInserted( TableItem tableItem, ValueMetaInterface v ) {
          tableItem.setText( 3, "Y" );
          return true;
        }
      };
      BaseStepDialog.getFieldsFromPrevious(
        previousFields, wReturn, 1, new int[] { 1, 2 }, new int[] {}, -1, -1, markExternalId );
    }
  } catch ( KettleException ke ) {
    new ErrorDialog(
      shell, BaseMessages.getString( PKG, "SalesforceUpsertDialog.FailedToGetFields.DialogTitle" ),
      BaseMessages.getString( PKG, "SalesforceUpsertDialog.FailedToGetFields.DialogMessage" ), ke );
  }
}
/**
 * Tests the Salesforce connection using the URL, user name and password currently
 * entered in the dialog, and reports success or failure in a dialog box.
 */
private void test() {
  SalesforceConnection connection = null;
  Exception connectError = null; // keep the whole exception so the cause/stack trace survives
  try {
    SalesforceUpsertMeta meta = new SalesforceUpsertMeta();
    getInfo( meta );
    // check if the user is given
    if ( !checkUser() ) {
      return;
    }
    connection =
      new SalesforceConnection( log, transMeta.environmentSubstitute( meta.getTargetURL() ), transMeta
        .environmentSubstitute( meta.getUserName() ), transMeta.environmentSubstitute( meta.getPassword() ) );
    connection.connect();
  } catch ( Exception e ) {
    connectError = e;
  } finally {
    if ( connection != null ) {
      try {
        connection.close();
      } catch ( Exception e ) { /* Ignore */
      }
    }
  }
  if ( connectError == null ) {
    MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_INFORMATION );
    mb.setMessage( BaseMessages.getString( PKG, "SalesforceUpsertDialog.Connected.OK", wUserName.getText() )
      + Const.CR );
    mb.setText( BaseMessages.getString( PKG, "SalesforceUpsertDialog.Connected.Title.Ok" ) );
    mb.open();
  } else {
    // Pass the original exception instead of wrapping only its message, so the
    // ErrorDialog can show the full stack trace and cause chain.
    new ErrorDialog(
      shell,
      BaseMessages.getString( PKG, "SalesforceUpsertDialog.Connected.Title.Error" ),
      BaseMessages.getString( PKG, "SalesforceUpsertDialog.Connected.NOK", wUserName.getText() ),
      connectError );
  }
}
/**
 * Refreshes the upsert-field combo with the fields of the currently selected module,
 * restoring the previous selection when possible.
 */
private void getFieldsList() {
  try {
    // Remember what the user had selected before repopulating.
    String previousSelection = wUpsertField.getText();
    wUpsertField.removeAll();
    wUpsertField.setItems( getModuleFields() );
    if ( !Const.isEmpty( previousSelection ) ) {
      wUpsertField.setText( previousSelection );
    }
  } catch ( Exception e ) {
    new ErrorDialog( shell,
      BaseMessages.getString( PKG, "SalesforceUpsertDialog.ErrorRetrieveModules.DialogTitle" ),
      BaseMessages.getString( PKG, "SalesforceUpsertDialog.ErrorRetrieveData.ErrorRetrieveModules" ), e );
  }
}
/**
 * Reads the data from the SalesforceUpsertMeta object and shows it in this dialog.
 *
 * @param in
 *          The SalesforceUpsertMeta object to obtain the data from.
 */
public void getData( SalesforceUpsertMeta in ) {
  wURL.setText( Const.NVL( in.getTargetURL(), "" ) );
  wUserName.setText( Const.NVL( in.getUserName(), "" ) );
  wPassword.setText( Const.NVL( in.getPassword(), "" ) );
  // Guard against a null batch size; the original code also set this widget twice.
  wBatchSize.setText( Const.NVL( in.getBatchSize(), "" ) );
  wModule.setText( Const.NVL( in.getModule(), "Account" ) );
  wUpsertField.setText( Const.NVL( in.getUpsertField(), "Id" ) );
  wSalesforceIDFieldName.setText( Const.NVL( in.getSalesforceIDFieldName(), "" ) );
  if ( isDebug() ) {
    logDebug( BaseMessages.getString( PKG, "SalesforceUpsertDialog.Log.GettingFieldsInfo" ) );
  }
  if ( input.getUpdateLookup() != null ) {
    for ( int i = 0; i < input.getUpdateLookup().length; i++ ) {
      TableItem item = wReturn.table.getItem( i );
      if ( input.getUpdateLookup()[i] != null ) {
        item.setText( 1, input.getUpdateLookup()[i] );
      }
      if ( input.getUpdateStream()[i] != null ) {
        item.setText( 2, input.getUpdateStream()[i] );
      }
      // A missing flag defaults to "use external id".
      if ( input.getUseExternalId()[i] == null || input.getUseExternalId()[i].booleanValue() ) {
        item.setText( 3, "Y" );
      } else {
        item.setText( 3, "N" );
      }
    }
  }
  wReturn.removeEmptyRows();
  wReturn.setRowNums();
  wReturn.optWidth( true );
  wTimeOut.setText( Const.NVL( in.getTimeOut(), SalesforceConnectionUtils.DEFAULT_TIMEOUT ) );
  wUseCompression.setSelection( in.isUsingCompression() );
  wRollbackAllChangesOnError.setSelection( in.isRollbackAllChangesOnError() );
  wStepname.selectAll();
  wStepname.setFocus();
}
/**
 * Discards any edits: clears the returned step name (so open() returns null),
 * restores the original changed flag and closes the dialog.
 */
private void cancel() {
stepname = null;
input.setChanged( changed );
dispose();
}
/**
 * Validates and stores the dialog contents into the step metadata and closes the dialog.
 * If validation fails, the error is shown and the dialog stays open so the user can
 * correct the input (previously the dialog was disposed even on failure).
 */
private void ok() {
  try {
    getInfo( input );
  } catch ( KettleException e ) {
    new ErrorDialog(
      shell, BaseMessages.getString( PKG, "SalesforceUpsertDialog.ErrorValidateData.DialogTitle" ),
      BaseMessages.getString( PKG, "SalesforceUpsertDialog.ErrorValidateData.DialogMessage" ), e );
    // Keep the dialog open on validation failure instead of silently closing it.
    return;
  }
  dispose();
}
/**
 * Copies the dialog contents into the given meta object.
 *
 * @param in the meta object to fill
 * @throws KettleException passed through from the meta setters
 */
private void getInfo( SalesforceUpsertMeta in ) throws KettleException {
  stepname = wStepname.getText(); // return value
  // Connection settings.
  in.setTargetURL( Const.NVL( wURL.getText(), SalesforceConnectionUtils.TARGET_DEFAULT_URL ) );
  in.setUserName( wUserName.getText() );
  in.setPassword( wPassword.getText() );
  // Module / upsert settings.
  in.setModule( Const.NVL( wModule.getText(), "Account" ) );
  in.setUpsertField( Const.NVL( wUpsertField.getText(), "Id" ) );
  in.setSalesforceIDFieldName( wSalesforceIDFieldName.getText() );
  in.setBatchSize( wBatchSize.getText() );
  // Copy the non-empty mapping rows into the parallel arrays of the meta.
  int rowCount = wReturn.nrNonEmpty();
  in.allocate( rowCount );
  //CHECKSTYLE:Indentation:OFF
  for ( int row = 0; row < rowCount; row++ ) {
    TableItem item = wReturn.getNonEmpty( row );
    in.getUpdateLookup()[row] = item.getText( 1 );
    in.getUpdateStream()[row] = item.getText( 2 );
    in.getUseExternalId()[row] = Boolean.valueOf( "Y".equals( item.getText( 3 ) ) );
  }
  // Remaining options.
  in.setUseCompression( wUseCompression.getSelection() );
  in.setTimeOut( Const.NVL( wTimeOut.getText(), "0" ) );
  in.setRollbackAllChangesOnError( wRollbackAllChangesOnError.getSelection() );
}
/**
 * Verifies that a module is selected (and, via checkUser(), that a user name is given).
 * Shows an error box and returns false when the module is missing.
 */
private boolean checkInput() {
  if ( !Const.isEmpty( wModule.getText() ) ) {
    return checkUser();
  }
  MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
  mb.setMessage( BaseMessages.getString( PKG, "SalesforceUpsertDialog.ModuleMissing.DialogMessage" ) );
  mb.setText( BaseMessages.getString( PKG, "System.Dialog.Error.Title" ) );
  mb.open();
  return false;
}
/**
 * Verifies that a user name has been entered; shows an error box and returns false otherwise.
 */
private boolean checkUser() {
  if ( !Const.isEmpty( wUserName.getText() ) ) {
    return true;
  }
  MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
  mb.setMessage( BaseMessages.getString( PKG, "SalesforceUpsertDialog.UsernameMissing.DialogMessage" ) );
  mb.setText( BaseMessages.getString( PKG, "System.Dialog.Error.Title" ) );
  mb.open();
  return false;
}
/**
 * Connects to Salesforce with the current dialog settings and returns the field names
 * of the currently selected module.
 *
 * @return the field names of the selected module
 * @throws KettleException if the connection or the field lookup fails
 */
private String[] getModuleFields() throws KettleException {
  SalesforceUpsertMeta meta = new SalesforceUpsertMeta();
  getInfo( meta );
  SalesforceConnection connection = null;
  String url = transMeta.environmentSubstitute( meta.getTargetURL() );
  try {
    String selectedModule = transMeta.environmentSubstitute( meta.getModule() );
    // Define a new Salesforce connection
    connection =
      new SalesforceConnection( log, url, transMeta.environmentSubstitute( meta.getUserName() ), transMeta
        .environmentSubstitute( meta.getPassword() ) );
    int realTimeOut = Const.toInt( transMeta.environmentSubstitute( meta.getTimeOut() ), 0 );
    connection.setTimeOut( realTimeOut );
    // connect to Salesforce
    connection.connect();
    // return fieldsname for the module
    return connection.getFields( selectedModule );
  } catch ( Exception e ) {
    // Fixed typo in the error message ("Erreur" -> "Error").
    throw new KettleException( "Error getting fields from module [" + url + "]!", e );
  } finally {
    if ( connection != null ) {
      try {
        connection.close();
      } catch ( Exception e ) { /* Ignore */
      }
    }
  }
}
/**
 * Reads in the fields from the previous steps and from the ONE next step and opens an EnterMappingDialog with this
 * information. After the user did the mapping, those information is put into the Select/Rename table.
 */
private void generateMappings() {
  if ( !checkInput() ) {
    return;
  }
  // Determine the source and target fields...
  //
  RowMetaInterface sourceFields;
  RowMetaInterface targetFields = new RowMeta();
  try {
    sourceFields = transMeta.getPrevStepFields( stepMeta );
  } catch ( KettleException e ) {
    new ErrorDialog( shell,
      BaseMessages.getString( PKG, "SalesforceUpsertDialog.DoMapping.UnableToFindSourceFields.Title" ),
      BaseMessages.getString( PKG, "SalesforceUpsertDialog.DoMapping.UnableToFindSourceFields.Message" ), e );
    return;
  }
  try {
    // The target fields are the fields of the selected Salesforce module.
    String[] fields = getModuleFields();
    for ( int i = 0; i < fields.length; i++ ) {
      targetFields.addValueMeta( new ValueMeta( fields[i] ) );
    }
  } catch ( Exception e ) {
    new ErrorDialog( shell, BaseMessages.getString(
      PKG, "SalesforceUpsertDialog.DoMapping.UnableToFindTargetFields.Title" ), BaseMessages.getString(
      PKG, "SalesforceUpsertDialog.DoMapping.UnableToFindTargetFields.Message" ), e );
    return;
  }
  // Create the existing mapping list from the rows already in the table,
  // collecting any fields that can no longer be resolved.
  //
  List<SourceToTargetMapping> mappings = new ArrayList<SourceToTargetMapping>();
  StringBuilder missingSourceFields = new StringBuilder();
  StringBuilder missingTargetFields = new StringBuilder();
  int nrFields = wReturn.nrNonEmpty();
  for ( int i = 0; i < nrFields; i++ ) {
    TableItem item = wReturn.getNonEmpty( i );
    String source = item.getText( 2 );
    String target = item.getText( 1 );
    int sourceIndex = sourceFields.indexOfValue( source );
    if ( sourceIndex < 0 ) {
      missingSourceFields.append( Const.CR + "   " + source + " --> " + target );
    }
    int targetIndex = targetFields.indexOfValue( target );
    if ( targetIndex < 0 ) {
      missingTargetFields.append( Const.CR + "   " + source + " --> " + target );
    }
    if ( sourceIndex < 0 || targetIndex < 0 ) {
      continue;
    }
    SourceToTargetMapping mapping = new SourceToTargetMapping( sourceIndex, targetIndex );
    mappings.add( mapping );
  }
  // show a confirm dialog if some missing field was found
  //
  if ( missingSourceFields.length() > 0 || missingTargetFields.length() > 0 ) {
    String message = "";
    if ( missingSourceFields.length() > 0 ) {
      message +=
        BaseMessages.getString(
          PKG, "SalesforceUpsertDialog.DoMapping.SomeSourceFieldsNotFound", missingSourceFields.toString() )
          + Const.CR;
    }
    if ( missingTargetFields.length() > 0 ) {
      // Fixed copy-paste bug: this message previously reported missingSourceFields.
      message +=
        BaseMessages.getString(
          PKG, "SalesforceUpsertDialog.DoMapping.SomeTargetFieldsNotFound", missingTargetFields.toString() )
          + Const.CR;
    }
    message += Const.CR;
    message +=
      BaseMessages.getString( PKG, "SalesforceUpsertDialog.DoMapping.SomeFieldsNotFoundContinue" ) + Const.CR;
    MessageDialog.setDefaultImage( GUIResource.getInstance().getImageSpoon() );
    boolean goOn =
      MessageDialog.openConfirm( shell, BaseMessages.getString(
        PKG, "SalesforceUpsertDialog.DoMapping.SomeFieldsNotFoundTitle" ), message );
    if ( !goOn ) {
      return;
    }
  }
  EnterMappingDialog d =
    new EnterMappingDialog( SalesforceUpsertDialog.this.shell, sourceFields.getFieldNames(), targetFields
      .getFieldNames(), mappings );
  mappings = d.open();
  // mappings == null if the user pressed cancel
  //
  if ( mappings != null ) {
    // Clear and re-populate!
    //
    wReturn.table.removeAll();
    wReturn.table.setItemCount( mappings.size() );
    for ( int i = 0; i < mappings.size(); i++ ) {
      SourceToTargetMapping mapping = mappings.get( i );
      TableItem item = wReturn.table.getItem( i );
      item.setText( 2, sourceFields.getValueMeta( mapping.getSourcePosition() ).getName() );
      item.setText( 1, targetFields.getValueMeta( mapping.getTargetPosition() ).getName() );
    }
    wReturn.setRowNums();
    wReturn.optWidth( true );
  }
}
/**
 * Rebuilds the stream-field combo values from the cached incoming field names, sorted.
 */
protected void setComboBoxes() {
  // Something was changed in the row: take a snapshot of the known input fields.
  final Map<String, Integer> fields = new HashMap<String, Integer>();
  fields.putAll( inputFields );
  List<String> names = new ArrayList<String>( fields.keySet() );
  String[] fieldNames = names.toArray( new String[names.size()] );
  Const.sortStrings( fieldNames );
  // Column 2 (stream field) gets the sorted incoming field names.
  ciReturn[1].setComboValues( fieldNames );
}
/**
 * Populates the module combo with all available Salesforce objects, once.
 * Sets getModulesListError when the lookup fails so the focus listener stops retrying.
 */
private void getModulesList() {
  if ( gotModule ) {
    // Already fetched for this dialog instance.
    return;
  }
  SalesforceConnection connection = null;
  try {
    SalesforceUpsertMeta meta = new SalesforceUpsertMeta();
    getInfo( meta );
    String realURL = transMeta.environmentSubstitute( meta.getTargetURL() );
    String previousSelection = wModule.getText();
    wModule.removeAll();
    // Define a new Salesforce connection and log in.
    connection =
      new SalesforceConnection( log, realURL,
        transMeta.environmentSubstitute( meta.getUserName() ),
        transMeta.environmentSubstitute( meta.getPassword() ) );
    connection.connect();
    wModule.setItems( connection.getAllAvailableObjects( false ) );
    if ( !Const.isEmpty( previousSelection ) ) {
      wModule.setText( previousSelection );
    }
    gotModule = true;
    getModulesListError = false;
  } catch ( Exception e ) {
    new ErrorDialog( shell,
      BaseMessages.getString( PKG, "SalesforceUpsertDialog.ErrorRetrieveModules.DialogTitle" ),
      BaseMessages.getString( PKG, "SalesforceUpsertDialog.ErrorRetrieveData.ErrorRetrieveModules" ), e );
    getModulesListError = true;
  } finally {
    if ( connection != null ) {
      try {
        connection.close();
      } catch ( Exception e ) { /* Ignore */
      }
    }
  }
}
/**
 * Asynchronously refreshes the table-field combo values from the selected module's fields.
 * NOTE(review): gotFields is set once and never reset here, so this refresh runs at most
 * once per dialog instance even if the module changes later — confirm this is intended.
 */
public void setModuleFieldCombo() {
if ( gotFields ) {
return;
}
gotFields = true;
Display display = shell.getDisplay();
if ( !( display == null || display.isDisposed() ) ) {
display.asyncExec( new Runnable() {
public void run() {
// clear
for ( int i = 0; i < tableFieldColumns.size(); i++ ) {
ColumnInfo colInfo = tableFieldColumns.get( i );
colInfo.setComboValues( new String[] {} );
}
// The widget may have been disposed while we were queued on the UI thread.
if ( wModule.isDisposed() ) {
return;
}
String selectedModule = transMeta.environmentSubstitute( wModule.getText() );
if ( !Const.isEmpty( selectedModule ) ) {
try {
// loop through the objects and find build the list of fields
String[] fieldsName = getModuleFields();
if ( fieldsName != null ) {
for ( int i = 0; i < tableFieldColumns.size(); i++ ) {
ColumnInfo colInfo = tableFieldColumns.get( i );
colInfo.setComboValues( fieldsName );
}
}
} catch ( Exception e ) {
for ( int i = 0; i < tableFieldColumns.size(); i++ ) {
ColumnInfo colInfo = tableFieldColumns.get( i );
colInfo.setComboValues( new String[] {} );
}
// ignore any errors here. drop downs will not be
// filled, but no problem for the user
}
}
}
} );
}
}
}
| |
package shef.nlp.supple.prolog.cafe;
import jp.ac.kobe_u.cs.prolog.lang.*;
import jp.ac.kobe_u.cs.prolog.builtin.*;
/*
* *** Please do not edit ! ***
* @(#) PRED_display_tdm_attributes_3.java
* @procedure display_tdm_attributes/3 in plcafe_supple_io.pl
*/
/*
* @version Prolog Cafe 0.8 November 2003
* @author Mutsunori Banbara (banbara@kobe-u.ac.jp)
* @author Naoyuki Tamura (tamura@kobe-u.ac.jp)
*/
/**
 * Machine-generated Prolog Cafe predicate object for
 * {@code display_tdm_attributes/3} (translated from plcafe_supple_io.pl).
 * Do not edit by hand; regenerate from the Prolog source instead.
 */
public class PRED_display_tdm_attributes_3 extends Predicate {
    // Compiled clause objects: clause 1, clause 2, and the backtrack
    // handler that falls through from clause 1 to clause 2.
    static Predicate display_tdm_attributes_3_1 = new PRED_display_tdm_attributes_3_1();
    static Predicate display_tdm_attributes_3_2 = new PRED_display_tdm_attributes_3_2();
    static Predicate display_tdm_attributes_3_sub_1 = new PRED_display_tdm_attributes_3_sub_1();
    // The three Prolog arguments of the goal.
    public Term arg1, arg2, arg3;

    public PRED_display_tdm_attributes_3(Term a1, Term a2, Term a3, Predicate cont) {
        arg1 = a1;
        arg2 = a2;
        arg3 = a3;
        this.cont = cont;
    }

    public PRED_display_tdm_attributes_3(){}

    /** Installs the arguments and the continuation before execution. */
    public void setArgument(Term[] args, Predicate cont) {
        arg1 = args[0];
        arg2 = args[1];
        arg3 = args[2];
        this.cont = cont;
    }

    /** Loads the arguments into the engine's argument registers, then calls the predicate. */
    public Predicate exec(Prolog engine) {
        engine.aregs[1] = arg1;
        engine.aregs[2] = arg2;
        engine.aregs[3] = arg3;
        engine.cont = cont;
        return call(engine);
    }

    /** Creates a choice point: try clause 1, fall back through sub_1 on failure. */
    public Predicate call(Prolog engine) {
        engine.setB0();
        return engine.jtry(display_tdm_attributes_3_1, display_tdm_attributes_3_sub_1);
    }

    public int arity() { return 3; }

    public String toString() {
        return "display_tdm_attributes(" + arg1 + ", " + arg2 + ", " + arg3 + ")";
    }
}
/**
 * Backtrack handler for display_tdm_attributes/3: when clause 1 fails,
 * commit ("trust") to the second and final clause. Machine-generated.
 */
class PRED_display_tdm_attributes_3_sub_1 extends PRED_display_tdm_attributes_3 {
    public Predicate exec(Prolog engine) {
        return engine.trust(display_tdm_attributes_3_2);
    }
}
/**
 * First clause of display_tdm_attributes/3 (machine-generated WAM-style
 * code — do not edit). The exec() body constructs, bottom-up, the clause
 * body as a nested ','/2 structure (=.., reverse/2, the recursive
 * display_tdm_attributes/5 call, semantics/4, a foreach/2 over ne_tag
 * members that writes name/offset attributes, nl, !, write_semantics/1)
 * and then chains: $get_level -> nonvar(Arg2) -> foreach(member(edge(...),
 * Arg2), Body) -> $cut.
 */
class PRED_display_tdm_attributes_3_1 extends PRED_display_tdm_attributes_3 {
    // Pre-built functor and atom symbols used when assembling the clause body.
    static SymbolTerm f1 = SymbolTerm.makeSymbol("member", 2);
    static SymbolTerm f2 = SymbolTerm.makeSymbol("edge", 10);
    static SymbolTerm s3 = SymbolTerm.makeSymbol("[]");
    static SymbolTerm f6 = SymbolTerm.makeSymbol(",", 2);
    static SymbolTerm f7 = SymbolTerm.makeSymbol("=..", 2);
    static SymbolTerm f9 = SymbolTerm.makeSymbol("reverse", 2);
    static SymbolTerm f11 = SymbolTerm.makeSymbol("display_tdm_attributes", 5);
    static SymbolTerm f13 = SymbolTerm.makeSymbol("semantics", 4);
    static SymbolTerm s14 = SymbolTerm.makeSymbol("inactive");
    static SymbolTerm f16 = SymbolTerm.makeSymbol("\\=", 2);
    static SymbolTerm s18 = SymbolTerm.makeSymbol("nl");
    static SymbolTerm f19 = SymbolTerm.makeSymbol("foreach", 2);
    static SymbolTerm f20 = SymbolTerm.makeSymbol("ne_tag", 2);
    static SymbolTerm f21 = SymbolTerm.makeSymbol("offsets", 2);
    static SymbolTerm f25 = SymbolTerm.makeSymbol("ne_tag", 3);
    static SymbolTerm f27 = SymbolTerm.makeSymbol("write", 1);
    static SymbolTerm s28 = SymbolTerm.makeSymbol("name ");
    static Term[] h59 = {s28};
    static StructureTerm s29 = new StructureTerm(f27, h59);
    static SymbolTerm s31 = SymbolTerm.makeSymbol(" ");
    static Term[] h60 = {s31};
    static StructureTerm s32 = new StructureTerm(f27, h60);
    static SymbolTerm s46 = SymbolTerm.makeSymbol("!");
    static SymbolTerm f47 = SymbolTerm.makeSymbol("write_semantics", 1);
    public Predicate exec(Prolog engine) {
        Term a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18, a19, a20, a21, a22, a23, a24, a25, a26, a27, a28, a29, a30, a31, a32, a33, a34, a35, a36, a37, a38, a39, a40, a41, a42, a43, a44, a45, a46, a47, a48, a49, a50, a51, a52, a53, a54, a55, a56;
        Predicate p1, p2, p3;
        // Dereference the argument registers loaded by the caller.
        a1 = engine.aregs[1].dereference();
        a2 = engine.aregs[2].dereference();
        a3 = engine.aregs[3].dereference();
        Predicate cont = engine.cont;
        // a4 receives the cut barrier (choice-point level) via $get_level below.
        a4 = new VariableTerm(engine);
        a7 = new VariableTerm(engine);
        a8 = new VariableTerm(engine);
        a9 = new VariableTerm(engine);
        a10 = new VariableTerm(engine);
        a11 = new VariableTerm(engine);
        // member(edge(_,_,A7,[],_,A8,_,A9,A10,A11), Arg2) — the foreach generator.
        Term[] h4 = {new VariableTerm(engine), new VariableTerm(engine), a7, s3, new VariableTerm(engine), a8, new VariableTerm(engine), a9, a10, a11};
        a6 = new StructureTerm(f2, h4);
        Term[] h5 = {a6, a2};
        a5 = new StructureTerm(f1, h5);
        a15 = new VariableTerm(engine);
        a14 = new ListTerm(a15, new VariableTerm(engine));
        Term[] h8 = {a7, a14};
        a13 = new StructureTerm(f7, h8);
        a18 = new VariableTerm(engine);
        Term[] h10 = {a8, a18};
        a17 = new StructureTerm(f9, h10);
        Term[] h12 = {a15, a18, a9, a10, a3};
        a20 = new StructureTerm(f11, h12);
        a23 = new VariableTerm(engine);
        Term[] h15 = {a15, s14, a11, a23};
        a22 = new StructureTerm(f13, h15);
        Term[] h17 = {a23, s3};
        a25 = new StructureTerm(f16, h17);
        a31 = new VariableTerm(engine);
        a33 = new VariableTerm(engine);
        a34 = new VariableTerm(engine);
        Term[] h22 = {a33, a34};
        a32 = new StructureTerm(f21, h22);
        Term[] h23 = {a31, a32};
        a30 = new StructureTerm(f20, h23);
        Term[] h24 = {a30, a23};
        a29 = new StructureTerm(f1, h24);
        a37 = new VariableTerm(engine);
        Term[] h26 = {a31, a23, a37};
        a36 = new StructureTerm(f25, h26);
        Term[] h30 = {a33};
        a40 = new StructureTerm(f27, h30);
        Term[] h33 = {a34};
        a43 = new StructureTerm(f27, h33);
        Term[] h34 = {a37};
        a46 = new StructureTerm(f27, h34);
        Term[] h35 = {a31};
        a49 = new StructureTerm(f27, h35);
        // Assemble the write/nl conjunction bottom-up as nested ','/2 terms.
        Term[] h36 = {a49, s18};
        a48 = new StructureTerm(f6, h36);
        Term[] h37 = {s32, a48};
        a47 = new StructureTerm(f6, h37);
        Term[] h38 = {a46, a47};
        a45 = new StructureTerm(f6, h38);
        Term[] h39 = {s32, a45};
        a44 = new StructureTerm(f6, h39);
        Term[] h40 = {a43, a44};
        a42 = new StructureTerm(f6, h40);
        Term[] h41 = {s32, a42};
        a41 = new StructureTerm(f6, h41);
        Term[] h42 = {a40, a41};
        a39 = new StructureTerm(f6, h42);
        Term[] h43 = {s29, a39};
        a38 = new StructureTerm(f6, h43);
        Term[] h44 = {a36, a38};
        a35 = new StructureTerm(f6, h44);
        Term[] h45 = {a29, a35};
        a28 = new StructureTerm(f19, h45);
        // write_semantics([A9, A10, A23]) followed by nl, then cut.
        a56 = new ListTerm(a23, s3);
        a55 = new ListTerm(a10, a56);
        a54 = new ListTerm(a9, a55);
        Term[] h48 = {a54};
        a53 = new StructureTerm(f47, h48);
        Term[] h49 = {a53, s18};
        a52 = new StructureTerm(f6, h49);
        Term[] h50 = {s46, a52};
        a51 = new StructureTerm(f6, h50);
        Term[] h51 = {s18, a51};
        a50 = new StructureTerm(f6, h51);
        Term[] h52 = {a28, a50};
        a27 = new StructureTerm(f6, h52);
        Term[] h53 = {s18, a27};
        a26 = new StructureTerm(f6, h53);
        Term[] h54 = {a25, a26};
        a24 = new StructureTerm(f6, h54);
        Term[] h55 = {a22, a24};
        a21 = new StructureTerm(f6, h55);
        Term[] h56 = {a20, a21};
        a19 = new StructureTerm(f6, h56);
        Term[] h57 = {a17, a19};
        a16 = new StructureTerm(f6, h57);
        Term[] h58 = {a13, a16};
        a12 = new StructureTerm(f6, h58);
        // Execution chain (built in reverse): $get_level(A4), nonvar(Arg2),
        // foreach(member(...), Body), $cut(A4), then the caller's continuation.
        p1 = new PRED_$cut_1(a4, cont);
        p2 = new PRED_foreach_2(a5, a12, p1);
        p3 = new PRED_nonvar_1(a2, p2);
        return new PRED_$get_level_1(a4, p3);
    }
}
/**
 * Second clause of display_tdm_attributes/3: the trivially-true clause —
 * succeed immediately by proceeding to the continuation. Machine-generated.
 */
class PRED_display_tdm_attributes_3_2 extends PRED_display_tdm_attributes_3 {
    public Predicate exec(Prolog engine) {
        return engine.cont;
    }
}
| |
package lists;
import java.util.Iterator;
/**
*
* @author etcharn1
*/
/**
 * A sorted list backed by a circular doubly-linked list with a sentinel node:
 * {@code head.data} is always null, {@code head.next} is the first real node
 * and {@code head.previous} the last. Positions are zero-based.
 */
public class SortedLinkedListWithIterator<T extends Comparable<? super T>> implements SortedListInterface<T>, Iterable<T> {
    private Node<T> head;          // sentinel node of the circular list
    private int numberOfEntries;   // number of real (non-sentinel) nodes

    public SortedLinkedListWithIterator()
    {
        head = new Node(null, null, null);
        head.previous = head;
        head.next = head;
        numberOfEntries = 0;
    }

    /**
     * Inserts newEntry in front of the first node whose data compares >= to it,
     * keeping the list in ascending order. Duplicates are allowed.
     */
    @Override
    public void add(T newEntry) {
        Node<T> currentNode = head.next;
        while( currentNode != head && newEntry.compareTo(currentNode.data) > 0 )
        {
            currentNode = currentNode.next;
        }
        Node newNode = new Node(currentNode.previous, newEntry, currentNode);
        newNode.previous.next = newNode;
        newNode.next.previous = newNode;
        numberOfEntries++;
    }

    /**
     * Removes the first entry that compares equal to item.
     * @return true if an entry was removed, false if no match was found
     */
    @Override
    public boolean remove(T item) {
        Node<T> currentNode = head.next;
        while( currentNode != head)
        {
            if ( currentNode.data.compareTo(item) == 0 )
            {
                currentNode.previous.next = currentNode.next;
                currentNode.next.previous = currentNode.previous;
                numberOfEntries--;
                return true;
            }
            // BUG FIX: advance the cursor. The original loop never moved past
            // the first node, spinning forever whenever it did not match.
            currentNode = currentNode.next;
        }
        return false;
    }

    /** Empties the list. */
    @Override
    public void clear() {
        head = new Node(null, null, null);
        // BUG FIX: restore the circular sentinel links. The original left
        // head.next/head.previous null, so the next add() or traversal threw
        // a NullPointerException.
        head.previous = head;
        head.next = head;
        numberOfEntries = 0;
    }

    /**
     * Returns the entry at the given zero-based position, or null if the
     * position is out of range.
     */
    @Override
    public T getEntry(int givenPosition) {
        if ( isValidPosition( givenPosition ) )
        {
            Node<T> current = walkToNode(givenPosition);
            return current.data;
        }
        return null;
    }

    /** Returns true if some entry equals() anEntry. */
    @Override
    public boolean contains(T anEntry) {
        Node current = head.next;
        while ( current != head )
        {
            if ( current.data.equals( anEntry ) )
            {
                return true;
            }
            current = current.next;
        }
        return false;
    }

    @Override
    public int getLength() {
        return numberOfEntries;
    }

    @Override
    public boolean isEmpty() {
        return numberOfEntries == 0;
    }

    @Override
    public ListIterator<T> iterator() {
        return new IteratorForLinkedList<T>();
    }

    @Override
    public int getPosition(T item) {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }

    /**
     * Bidirectional iterator over the list. NOTE(review): next()/previous()
     * do not guard against walking onto the sentinel; callers are expected to
     * check hasNext()/hasPrevious() first — confirm this matches the
     * ListIterator contract used by this project.
     */
    private class IteratorForLinkedList<T> implements ListIterator<T>
    {
        private Node<T> nextNode;          // node next() would return
        private Node<T> lastNodeReturned;  // node eligible for remove(), or null

        public IteratorForLinkedList()
        {
            nextNode = head.next;
            lastNodeReturned = null;
        }

        @Override
        public boolean hasNext() {
            return nextNode != head;
        }

        @Override
        public T next() {
            T item = nextNode.data;
            lastNodeReturned = nextNode;
            nextNode = nextNode.next;
            return item;
        }

        @Override
        public T previous() {
            nextNode = nextNode.previous;
            lastNodeReturned = nextNode;
            return nextNode.data;
        }

        @Override
        public boolean hasPrevious() {
            return nextNode.previous != head;
        }

        /** Inserts item just before the cursor (ignores sort order by design of ListIterator.add). */
        @Override
        public void add(T item) {
            Node nodeToAdd = new Node(nextNode.previous, item, nextNode);
            nodeToAdd.previous.next = nodeToAdd;
            nodeToAdd.next.previous = nodeToAdd;
            lastNodeReturned = null;
            numberOfEntries++;
        }

        /** Unlinks the node most recently returned by next()/previous(), if any. */
        @Override
        public void remove() {
            if ( lastNodeReturned != null )
            {
                lastNodeReturned.previous.next = lastNodeReturned.next;
                lastNodeReturned.next.previous = lastNodeReturned.previous;
                lastNodeReturned = null;
                numberOfEntries--;
            }
        }
    }

    /** Doubly-linked node; raw links are tolerated by the surrounding code. */
    private class Node<T>
    {
        T data;
        Node previous;
        Node next;
        public Node(Node previous, T data, Node next)
        {
            this.previous = previous;
            this.data = data;
            this.next = next;
        }
    }

    private boolean isValidPosition( int givenPosition )
    {
        // would be better to throw exception here if the position is invalid.
        // BUG FIX: positions are zero-based, so numberOfEntries itself is out
        // of range (the original '<=' let walkToNode stop on the sentinel and
        // getEntry return its null data).
        return givenPosition >= 0 && givenPosition < numberOfEntries;
    }

    /** Walks from whichever end of the circular list is closer to the position. */
    private Node<T> walkToNode( int givenPosition )
    {
        if ( isValidPosition( givenPosition ) )
        {
            Node current;
            // closer to the first
            if ( givenPosition < numberOfEntries / 2 )
            {
                current = head.next;
                int currentPosition = 0;
                while ( currentPosition < givenPosition )
                {
                    current = current.next;
                    currentPosition++;
                }
            }
            // closer to the last
            else
            {
                current = head;
                int currentPosition = numberOfEntries;
                while ( givenPosition < currentPosition )
                {
                    current = current.previous;
                    currentPosition--;
                }
            }
            return current;
        }
        return null;
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fetch.subphase.highlight;
import org.apache.lucene.search.highlight.Encoder;
import org.apache.lucene.search.vectorhighlight.BaseFragmentsBuilder;
import org.apache.lucene.search.vectorhighlight.BoundaryScanner;
import org.apache.lucene.search.vectorhighlight.BreakIteratorBoundaryScanner;
import org.apache.lucene.search.vectorhighlight.CustomFieldQuery;
import org.apache.lucene.search.vectorhighlight.FieldFragList;
import org.apache.lucene.search.vectorhighlight.FieldPhraseList.WeightedPhraseInfo;
import org.apache.lucene.search.vectorhighlight.FieldQuery;
import org.apache.lucene.search.vectorhighlight.FragListBuilder;
import org.apache.lucene.search.vectorhighlight.FragmentsBuilder;
import org.apache.lucene.search.vectorhighlight.ScoreOrderFragmentsBuilder;
import org.apache.lucene.search.vectorhighlight.SimpleBoundaryScanner;
import org.apache.lucene.search.vectorhighlight.SimpleFieldFragList;
import org.apache.lucene.search.vectorhighlight.SimpleFragListBuilder;
import org.apache.lucene.search.vectorhighlight.SingleFragListBuilder;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.search.fetch.FetchPhaseExecutionException;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.Field;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.FieldOptions;
import org.elasticsearch.search.internal.SearchContext;
import java.text.BreakIterator;
import java.util.Collections;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
/**
 * Highlighter implementation backed by Lucene's FastVectorHighlighter.
 * Requires the field to be indexed with term vectors including positions and
 * offsets (see {@link #canHighlight}). Per-hit state (field queries, fragment
 * builders, the FVH instance itself) is cached in the hit context under
 * {@link #CACHE_KEY} so it can be reused across fields of the same hit.
 */
public class FastVectorHighlighter implements Highlighter {
    // Shared default boundary scanners, reused when no per-field locale/chars are configured.
    private static final BoundaryScanner DEFAULT_SIMPLE_BOUNDARY_SCANNER = new SimpleBoundaryScanner();
    private static final BoundaryScanner DEFAULT_SENTENCE_BOUNDARY_SCANNER =
        new BreakIteratorBoundaryScanner(BreakIterator.getSentenceInstance(Locale.ROOT));
    private static final BoundaryScanner DEFAULT_WORD_BOUNDARY_SCANNER =
        new BreakIteratorBoundaryScanner(BreakIterator.getWordInstance(Locale.ROOT));
    public static final Setting<Boolean> SETTING_TV_HIGHLIGHT_MULTI_VALUE =
        Setting.boolSetting("search.highlight.term_vector_multi_value", true, Setting.Property.NodeScope);
    private static final String CACHE_KEY = "highlight-fsv";
    // Node-scoped: whether multi-valued fields are highlighted value-by-value.
    private final Boolean termVectorMultiValue;
    public FastVectorHighlighter(Settings settings) {
        this.termVectorMultiValue = SETTING_TV_HIGHLIGHT_MULTI_VALUE.get(settings);
    }
    /**
     * Highlights one field of one hit. Builds (or fetches from the per-hit
     * cache) the frag-list builder, fragments builder and field query for the
     * mapper, then asks the cached FVH for the best fragments. Falls back to a
     * single [0, noMatchSize) fragment when nothing matched, and to null when
     * there is nothing to return.
     */
    @Override
    public HighlightField highlight(HighlighterContext highlighterContext) {
        SearchContextHighlight.Field field = highlighterContext.field;
        SearchContext context = highlighterContext.context;
        FetchSubPhase.HitContext hitContext = highlighterContext.hitContext;
        FieldMapper mapper = highlighterContext.mapper;
        if (canHighlight(mapper) == false) {
            throw new IllegalArgumentException("the field [" + highlighterContext.fieldName +
                    "] should be indexed with term vector with position offsets to be used with fast vector highlighter");
        }
        Encoder encoder = field.fieldOptions().encoder().equals("html") ?
            HighlightUtils.Encoders.HTML : HighlightUtils.Encoders.DEFAULT;
        // Lazily create the per-hit cache entry shared by all highlighted fields of this hit.
        if (!hitContext.cache().containsKey(CACHE_KEY)) {
            hitContext.cache().put(CACHE_KEY, new HighlighterEntry());
        }
        HighlighterEntry cache = (HighlighterEntry) hitContext.cache().get(CACHE_KEY);
        try {
            MapperHighlightEntry entry = cache.mappers.get(mapper);
            if (entry == null) {
                // First time this mapper is highlighted for this hit: build the builders and queries.
                FragListBuilder fragListBuilder;
                BaseFragmentsBuilder fragmentsBuilder;
                final BoundaryScanner boundaryScanner = getBoundaryScanner(field);
                boolean forceSource = context.highlight().forceSource(field);
                if (field.fieldOptions().numberOfFragments() == 0) {
                    // numberOfFragments == 0 means "highlight the whole field value as one fragment".
                    fragListBuilder = new SingleFragListBuilder();
                    if (!forceSource && mapper.fieldType().stored()) {
                        fragmentsBuilder = new SimpleFragmentsBuilder(mapper, field.fieldOptions().preTags(),
                            field.fieldOptions().postTags(), boundaryScanner);
                    } else {
                        // Field is not stored (or source is forced): read values back from _source.
                        fragmentsBuilder = new SourceSimpleFragmentsBuilder(mapper, context,
                            field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner);
                    }
                } else {
                    fragListBuilder = field.fieldOptions().fragmentOffset() == -1 ?
                        new SimpleFragListBuilder() : new SimpleFragListBuilder(field.fieldOptions().fragmentOffset());
                    if (field.fieldOptions().scoreOrdered()) {
                        if (!forceSource && mapper.fieldType().stored()) {
                            fragmentsBuilder = new ScoreOrderFragmentsBuilder(field.fieldOptions().preTags(),
                                field.fieldOptions().postTags(), boundaryScanner);
                        } else {
                            fragmentsBuilder = new SourceScoreOrderFragmentsBuilder(mapper, context,
                                field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner);
                        }
                    } else {
                        if (!forceSource && mapper.fieldType().stored()) {
                            fragmentsBuilder = new SimpleFragmentsBuilder(mapper, field.fieldOptions().preTags(),
                                field.fieldOptions().postTags(), boundaryScanner);
                        } else {
                            fragmentsBuilder =
                                new SourceSimpleFragmentsBuilder(mapper, context, field.fieldOptions().preTags(),
                                    field.fieldOptions().postTags(), boundaryScanner);
                        }
                    }
                }
                fragmentsBuilder.setDiscreteMultiValueHighlighting(termVectorMultiValue);
                entry = new MapperHighlightEntry();
                if (field.fieldOptions().requireFieldMatch()) {
                    // We use the top-level reader to rewrite the query against all readers,
                    // so the rewritten query can be cached across hits (and across readers).
                    entry.fieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query,
                        hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch());
                } else {
                    // Same as above, but without restricting matches to this field.
                    entry.noFieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query,
                        hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch());
                }
                entry.fragListBuilder = fragListBuilder;
                entry.fragmentsBuilder = fragmentsBuilder;
                if (cache.fvh == null) {
                    // parameters to FVH are not requires since:
                    // first two booleans are not relevant since they are set on the CustomFieldQuery
                    // (phrase and fieldMatch) fragment builders are used explicitly
                    cache.fvh = new org.apache.lucene.search.vectorhighlight.FastVectorHighlighter();
                }
                CustomFieldQuery.highlightFilters.set(field.fieldOptions().highlightFilter());
                cache.mappers.put(mapper, entry);
            }
            final FieldQuery fieldQuery;
            if (field.fieldOptions().requireFieldMatch()) {
                fieldQuery = entry.fieldMatchFieldQuery;
            } else {
                fieldQuery = entry.noFieldMatchFieldQuery;
            }
            cache.fvh.setPhraseLimit(field.fieldOptions().phraseLimit());
            String[] fragments;
            // a HACK to make highlighter do highlighting, even though its using the single frag list builder
            int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ?
                Integer.MAX_VALUE : field.fieldOptions().numberOfFragments();
            int fragmentCharSize = field.fieldOptions().numberOfFragments() == 0 ?
                Integer.MAX_VALUE : field.fieldOptions().fragmentCharSize();
            // we highlight against the low level reader and docId, because if we load source, we want to reuse it if possible
            // Only send matched fields if they were requested to save time.
            if (field.fieldOptions().matchedFields() != null && !field.fieldOptions().matchedFields().isEmpty()) {
                fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(),
                    mapper.fieldType().name(), field.fieldOptions().matchedFields(), fragmentCharSize,
                    numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(),
                    field.fieldOptions().postTags(), encoder);
            } else {
                fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(),
                    mapper.fieldType().name(), fragmentCharSize, numberOfFragments, entry.fragListBuilder,
                    entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder);
            }
            if (fragments != null && fragments.length > 0) {
                return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments));
            }
            int noMatchSize = highlighterContext.field.fieldOptions().noMatchSize();
            if (noMatchSize > 0) {
                // Essentially we just request that a fragment is built from 0 to noMatchSize using
                // the normal fragmentsBuilder
                FieldFragList fieldFragList = new SimpleFieldFragList(-1 /*ignored*/);
                fieldFragList.add(0, noMatchSize, Collections.<WeightedPhraseInfo>emptyList());
                fragments = entry.fragmentsBuilder.createFragments(hitContext.reader(), hitContext.docId(),
                    mapper.fieldType().name(), fieldFragList, 1, field.fieldOptions().preTags(),
                    field.fieldOptions().postTags(), encoder);
                if (fragments != null && fragments.length > 0) {
                    return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments));
                }
            }
            return null;
        } catch (Exception e) {
            throw new FetchPhaseExecutionException(context,
                "Failed to highlight field [" + highlighterContext.fieldName + "]", e);
        }
    }
    /** FVH needs term vectors with both positions and offsets. */
    @Override
    public boolean canHighlight(FieldMapper fieldMapper) {
        return fieldMapper.fieldType().storeTermVectors() && fieldMapper.fieldType().storeTermVectorOffsets()
                && fieldMapper.fieldType().storeTermVectorPositions();
    }
    /**
     * Picks the boundary scanner configured for the field, falling back to the
     * shared ROOT-locale defaults.
     */
    private static BoundaryScanner getBoundaryScanner(Field field) {
        final FieldOptions fieldOptions = field.fieldOptions();
        final Locale boundaryScannerLocale =
            fieldOptions.boundaryScannerLocale() != null ? fieldOptions.boundaryScannerLocale() :
                Locale.ROOT;
        final HighlightBuilder.BoundaryScannerType type =
            fieldOptions.boundaryScannerType() != null ? fieldOptions.boundaryScannerType() :
                HighlightBuilder.BoundaryScannerType.CHARS;
        switch(type) {
        case SENTENCE:
            // NOTE(review): this null check is dead code — boundaryScannerLocale was
            // already defaulted to Locale.ROOT above, so the DEFAULT_* constants below
            // are never returned for SENTENCE/WORD.
            if (boundaryScannerLocale != null) {
                return new BreakIteratorBoundaryScanner(BreakIterator.getSentenceInstance(boundaryScannerLocale));
            }
            return DEFAULT_SENTENCE_BOUNDARY_SCANNER;
        case WORD:
            if (boundaryScannerLocale != null) {
                return new BreakIteratorBoundaryScanner(BreakIterator.getWordInstance(boundaryScannerLocale));
            }
            return DEFAULT_WORD_BOUNDARY_SCANNER;
        case CHARS:
            // NOTE(review): '!=' on boundaryChars() is an identity (not content)
            // comparison against the shared default array — presumably intentional
            // as a cheap "did the user configure custom chars" check; confirm.
            if (fieldOptions.boundaryMaxScan() != SimpleBoundaryScanner.DEFAULT_MAX_SCAN
                    || fieldOptions.boundaryChars() != SimpleBoundaryScanner.DEFAULT_BOUNDARY_CHARS) {
                return new SimpleBoundaryScanner(fieldOptions.boundaryMaxScan(), fieldOptions.boundaryChars());
            }
            return DEFAULT_SIMPLE_BOUNDARY_SCANNER;
        default:
            throw new IllegalArgumentException("Invalid boundary scanner type: " + type.toString());
        }
    }
    /** Per-(hit, mapper) cached builders and rewritten field queries. */
    private class MapperHighlightEntry {
        public FragListBuilder fragListBuilder;
        public FragmentsBuilder fragmentsBuilder;
        public FieldQuery noFieldMatchFieldQuery;
        public FieldQuery fieldMatchFieldQuery;
    }
    /** Per-hit cache: one FVH instance plus the per-mapper entries. */
    private class HighlighterEntry {
        public org.apache.lucene.search.vectorhighlight.FastVectorHighlighter fvh;
        public Map<FieldMapper, MapperHighlightEntry> mappers = new HashMap<>();
    }
}
| |
/*
* Copyright 2014 Red Hat, Inc.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
*
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* The Apache License v2.0 is available at
* http://www.opensource.org/licenses/apache2.0.php
*
* You may elect to redistribute this code under either of these licenses.
*/
/*
* Copyright (c) 2011-2013 The original author or authors
* ------------------------------------------------------
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
*
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* The Apache License v2.0 is available at
* http://www.opensource.org/licenses/apache2.0.php
*
* You may elect to redistribute this code under either of these licenses.
*/
package io.vertx.ext.web.handler.sockjs.impl;
import io.vertx.core.Handler;
import io.vertx.core.MultiMap;
import io.vertx.core.Vertx;
import io.vertx.core.buffer.Buffer;
import io.vertx.core.json.DecodeException;
import io.vertx.core.logging.Logger;
import io.vertx.core.logging.LoggerFactory;
import io.vertx.core.net.SocketAddress;
import io.vertx.core.shareddata.LocalMap;
import io.vertx.core.shareddata.Shareable;
import io.vertx.ext.web.RoutingContext;
import io.vertx.ext.web.handler.sockjs.SockJSSocket;
import java.util.LinkedList;
import java.util.Queue;
import static io.vertx.core.buffer.Buffer.buffer;
/**
* The SockJS session implementation.
*
* If multiple instances of the SockJS server are used then instances of this
* class can be accessed by different threads (not concurrently), so we store
* it in a shared data map
*
* @author <a href="http://tfox.org">Tim Fox</a>
*/
class SockJSSession extends SockJSSocketBase implements Shareable {
private static final Logger log = LoggerFactory.getLogger(SockJSSession.class);
private final LocalMap<String, SockJSSession> sessions;
private final Queue<String> pendingWrites = new LinkedList<>();
private final Queue<String> pendingReads = new LinkedList<>();
private TransportListener listener;
private Handler<Buffer> dataHandler;
private boolean closed;
private boolean openWritten;
private final String id;
private final long timeout;
private final Handler<SockJSSocket> sockHandler;
private long heartbeatID = -1;
private long timeoutTimerID = -1;
private boolean paused;
private int maxQueueSize = 64 * 1024; // Message queue size is measured in *characters* (not bytes)
private int messagesSize;
private Handler<Void> drainHandler;
private Handler<Void> endHandler;
private Handler<Throwable> exceptionHandler;
private boolean handleCalled;
private SocketAddress localAddress;
private SocketAddress remoteAddress;
private String uri;
private MultiMap headers;
SockJSSession(Vertx vertx, LocalMap<String, SockJSSession> sessions, RoutingContext rc, long heartbeatInterval,
Handler<SockJSSocket> sockHandler) {
this(vertx, sessions, rc, null, -1, heartbeatInterval, sockHandler);
}
SockJSSession(Vertx vertx, LocalMap<String, SockJSSession> sessions, RoutingContext rc, String id, long timeout, long heartbeatInterval,
Handler<SockJSSocket> sockHandler) {
super(vertx, rc.session(), rc.user());
this.sessions = sessions;
this.id = id;
this.timeout = timeout;
this.sockHandler = sockHandler;
// Start a heartbeat
heartbeatID = vertx.setPeriodic(heartbeatInterval, tid -> {
if (listener != null) {
listener.sendFrame("h");
}
});
}
@Override
public synchronized SockJSSocket write(Buffer buffer) {
String msgStr = buffer.toString();
pendingWrites.add(msgStr);
this.messagesSize += msgStr.length();
if (listener != null) {
writePendingMessages();
}
return this;
}
@Override
public synchronized SockJSSession handler(Handler<Buffer> handler) {
this.dataHandler = handler;
return this;
}
@Override
public synchronized SockJSSession pause() {
paused = true;
return this;
}
@Override
public synchronized SockJSSession resume() {
paused = false;
if (dataHandler != null) {
for (String msg: this.pendingReads) {
dataHandler.handle(buffer(msg));
}
}
return this;
}
@Override
public synchronized SockJSSession setWriteQueueMaxSize(int maxQueueSize) {
if (maxQueueSize < 1) {
throw new IllegalArgumentException("maxQueueSize must be >= 1");
}
this.maxQueueSize = maxQueueSize;
return this;
}
@Override
public synchronized boolean writeQueueFull() {
return messagesSize >= maxQueueSize;
}
@Override
public synchronized SockJSSession drainHandler(Handler<Void> handler) {
this.drainHandler = handler;
return this;
}
@Override
public synchronized SockJSSession exceptionHandler(Handler<Throwable> handler) {
this.exceptionHandler = handler;
return this;
}
@Override
public synchronized SockJSSession endHandler(Handler<Void> endHandler) {
this.endHandler = endHandler;
return this;
}
public synchronized void shutdown() {
doClose();
}
// When the user calls close() we don't actually close the session - unless it's a websocket one
// Yes, SockJS is weird, but it's hard to work out expected server behaviour when there's no spec
@Override
public synchronized void close() {
if (endHandler != null) {
endHandler.handle(null);
}
closed = true;
if (listener != null && handleCalled) {
listener.sessionClosed();
}
}
@Override
public SocketAddress remoteAddress() {
return remoteAddress;
}
@Override
public SocketAddress localAddress() {
return localAddress;
}
@Override
public MultiMap headers() {
return headers;
}
@Override
public String uri() {
return uri;
}
synchronized boolean isClosed() {
return closed;
}
synchronized void resetListener() {
listener = null;
// We set a timer that will kick in and close the session if the client doesn't come back
// We MUST ALWAYS do this or we can get a memory leak on the server
setTimer();
}
private void cancelTimer() {
if (timeoutTimerID != -1) {
vertx.cancelTimer(timeoutTimerID);
}
}
private void setTimer() {
if (timeout != -1) {
cancelTimer();
timeoutTimerID = vertx.setTimer(timeout, new Handler<Long>() {
public void handle(Long id) {
vertx.cancelTimer(heartbeatID);
if (listener == null) {
shutdown();
}
if (listener != null) {
listener.close();
}
}
});
}
}
synchronized void writePendingMessages() {
String json = JsonCodec.encode(pendingWrites.toArray());
listener.sendFrame("a" + json);
pendingWrites.clear();
messagesSize = 0;
if (drainHandler != null && messagesSize <= maxQueueSize / 2) {
Handler<Void> dh = drainHandler;
drainHandler = null;
dh.handle(null);
}
}
synchronized void register(final TransportListener lst) {
if (closed) {
// Closed by the application
writeClosed(lst);
// And close the listener request
lst.close();
} else if (this.listener != null) {
writeClosed(lst, 2010, "Another connection still open");
// And close the listener request
lst.close();
} else {
cancelTimer();
this.listener = lst;
if (!openWritten) {
writeOpen(lst);
sockHandler.handle(this);
handleCalled = true;
}
if (listener != null) {
if (closed) {
// Could have already been closed by the user
writeClosed(lst);
listener = null;
lst.close();
} else {
if (!pendingWrites.isEmpty()) {
writePendingMessages();
}
}
}
}
}
// Actually close the session - when the user calls close() the session actually continues to exist until timeout
// Yes, I know it's weird but that's the way SockJS likes it.
private void doClose() {
super.close(); // We must call this or handlers don't get unregistered and we get a leak
if (heartbeatID != -1) {
vertx.cancelTimer(heartbeatID);
}
if (timeoutTimerID != -1) {
vertx.cancelTimer(timeoutTimerID);
}
if (id != null) {
// Can be null if websocket session
sessions.remove(id);
}
if (!closed) {
closed = true;
if (endHandler != null) {
endHandler.handle(null);
}
}
}
private String[] parseMessageString(String msgs) {
try {
String[] parts;
if (msgs.startsWith("[")) {
//JSON array
parts = (String[])JsonCodec.decodeValue(msgs, String[].class);
} else {
//JSON string
String str = (String)JsonCodec.decodeValue(msgs, String.class);
parts = new String[] { str };
}
return parts;
} catch (DecodeException e) {
return null;
}
}
boolean handleMessages(String messages) {
String[] msgArr = parseMessageString(messages);
if (msgArr == null) {
return false;
} else {
if (dataHandler != null) {
for (String msg : msgArr) {
if (!paused) {
try {
dataHandler.handle(buffer(msg));
} catch (Throwable t) {
log.error("Unhandle exception", t);
}
} else {
pendingReads.add(msg);
}
}
}
return true;
}
}
void handleException(Throwable t) {
if (exceptionHandler != null) {
exceptionHandler.handle(t);
} else {
log.error("Unhandled exception", t);
}
}
public void writeClosed(TransportListener lst) {
writeClosed(lst, 3000, "Go away!");
}
private void writeClosed(TransportListener lst, int code, String msg) {
StringBuilder sb = new StringBuilder("c[");
sb.append(String.valueOf(code)).append(",\"");
sb.append(msg).append("\"]");
lst.sendFrame(sb.toString());
}
/**
 * Sends the SockJS open frame ({@code "o"}) and records that it was written.
 * (Simplified: the frame is a constant, so no StringBuilder is needed.)
 *
 * @param lst the transport to write to
 */
private void writeOpen(TransportListener lst) {
    lst.sendFrame("o");
    openWritten = true;
}
// Records the connection metadata for this session. Cookie headers are
// stripped from the stored copy (NOTE(review): presumably to avoid leaking
// session cookies to user handlers - confirm against BaseTransport).
void setInfo(SocketAddress localAddress, SocketAddress remoteAddress, String uri,
             MultiMap headers) {
    this.localAddress = localAddress;
    this.remoteAddress = remoteAddress;
    this.uri = uri;
    this.headers = BaseTransport.removeCookieHeaders(headers);
}
}
| |
package org.docksidestage.hanger.dbflute.cbean.cq.bs;
import java.util.Map;
import org.dbflute.cbean.*;
import org.dbflute.cbean.chelper.*;
import org.dbflute.cbean.coption.*;
import org.dbflute.cbean.cvalue.ConditionValue;
import org.dbflute.cbean.sqlclause.SqlClause;
import org.dbflute.exception.IllegalConditionBeanOperationException;
import org.docksidestage.hanger.dbflute.cbean.cq.ciq.*;
import org.docksidestage.hanger.dbflute.cbean.*;
import org.docksidestage.hanger.dbflute.cbean.cq.*;
/**
* The base condition-query of VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF.
* @author DBFlute(AutoGenerator)
*/
public class BsVendorTheLongAndWindingTableAndColumnRefCQ extends AbstractBsVendorTheLongAndWindingTableAndColumnRefCQ {

    // NOTE: auto-generated by DBFlute; avoid hand edits here - customize the
    // concrete subclass (VendorTheLongAndWindingTableAndColumnRefCQ) instead.

    // ===================================================================================
    // Attribute
    // =========
    // Lazily created by inline(); one InlineView/OnClause query per condition-query.
    protected VendorTheLongAndWindingTableAndColumnRefCIQ _inlineQuery;

    // ===================================================================================
    // Constructor
    // ===========
    public BsVendorTheLongAndWindingTableAndColumnRefCQ(ConditionQuery referrerQuery, SqlClause sqlClause, String aliasName, int nestLevel) {
        super(referrerQuery, sqlClause, aliasName, nestLevel);
    }

    // ===================================================================================
    // InlineView/OrClause
    // ===================
    /**
     * Prepare InlineView query. <br>
     * {select ... from ... left outer join (select * from VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF) where FOO = [value] ...}
     * <pre>
     * cb.query().queryMemberStatus().<span style="color: #CC4747">inline()</span>.setFoo...;
     * </pre>
     * @return The condition-query for InlineView query. (NotNull)
     */
    public VendorTheLongAndWindingTableAndColumnRefCIQ inline() {
        if (_inlineQuery == null) { _inlineQuery = xcreateCIQ(); }
        _inlineQuery.xsetOnClause(false); return _inlineQuery;
    }

    protected VendorTheLongAndWindingTableAndColumnRefCIQ xcreateCIQ() {
        VendorTheLongAndWindingTableAndColumnRefCIQ ciq = xnewCIQ();
        ciq.xsetBaseCB(_baseCB);
        return ciq;
    }

    protected VendorTheLongAndWindingTableAndColumnRefCIQ xnewCIQ() {
        return new VendorTheLongAndWindingTableAndColumnRefCIQ(xgetReferrerQuery(), xgetSqlClause(), xgetAliasName(), xgetNestLevel(), this);
    }

    /**
     * Prepare OnClause query. <br>
     * {select ... from ... left outer join VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF on ... and FOO = [value] ...}
     * <pre>
     * cb.query().queryMemberStatus().<span style="color: #CC4747">on()</span>.setFoo...;
     * </pre>
     * @return The condition-query for OnClause query. (NotNull)
     * @throws IllegalConditionBeanOperationException When this condition-query is base query.
     */
    public VendorTheLongAndWindingTableAndColumnRefCIQ on() {
        if (isBaseQuery()) { throw new IllegalConditionBeanOperationException("OnClause for local table is unavailable!"); }
        VendorTheLongAndWindingTableAndColumnRefCIQ inlineQuery = inline(); inlineQuery.xsetOnClause(true); return inlineQuery;
    }

    // ===================================================================================
    // Query
    // =====
    // One lazily-initialized ConditionValue per column; accessed reflectively
    // by the framework through the xdfget*/xgetCValue* naming convention.
    protected ConditionValue _theLongAndWindingTableAndColumnRefId;
    public ConditionValue xdfgetTheLongAndWindingTableAndColumnRefId()
    { if (_theLongAndWindingTableAndColumnRefId == null) { _theLongAndWindingTableAndColumnRefId = nCV(); }
      return _theLongAndWindingTableAndColumnRefId; }
    protected ConditionValue xgetCValueTheLongAndWindingTableAndColumnRefId() { return xdfgetTheLongAndWindingTableAndColumnRefId(); }

    /**
     * Add order-by as ascend. <br>
     * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID: {PK, NotNull, BIGINT(19)}
     * @return this. (NotNull)
     */
    public BsVendorTheLongAndWindingTableAndColumnRefCQ addOrderBy_TheLongAndWindingTableAndColumnRefId_Asc() { regOBA("THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID"); return this; }

    /**
     * Add order-by as descend. <br>
     * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID: {PK, NotNull, BIGINT(19)}
     * @return this. (NotNull)
     */
    public BsVendorTheLongAndWindingTableAndColumnRefCQ addOrderBy_TheLongAndWindingTableAndColumnRefId_Desc() { regOBD("THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID"); return this; }

    protected ConditionValue _theLongAndWindingTableAndColumnId;
    public ConditionValue xdfgetTheLongAndWindingTableAndColumnId()
    { if (_theLongAndWindingTableAndColumnId == null) { _theLongAndWindingTableAndColumnId = nCV(); }
      return _theLongAndWindingTableAndColumnId; }
    protected ConditionValue xgetCValueTheLongAndWindingTableAndColumnId() { return xdfgetTheLongAndWindingTableAndColumnId(); }

    /**
     * Add order-by as ascend. <br>
     * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID: {IX, NotNull, BIGINT(19), FK to VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN}
     * @return this. (NotNull)
     */
    public BsVendorTheLongAndWindingTableAndColumnRefCQ addOrderBy_TheLongAndWindingTableAndColumnId_Asc() { regOBA("THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID"); return this; }

    /**
     * Add order-by as descend. <br>
     * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID: {IX, NotNull, BIGINT(19), FK to VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN}
     * @return this. (NotNull)
     */
    public BsVendorTheLongAndWindingTableAndColumnRefCQ addOrderBy_TheLongAndWindingTableAndColumnId_Desc() { regOBD("THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID"); return this; }

    protected ConditionValue _theLongAndWindingTableAndColumnRefDate;
    public ConditionValue xdfgetTheLongAndWindingTableAndColumnRefDate()
    { if (_theLongAndWindingTableAndColumnRefDate == null) { _theLongAndWindingTableAndColumnRefDate = nCV(); }
      return _theLongAndWindingTableAndColumnRefDate; }
    protected ConditionValue xgetCValueTheLongAndWindingTableAndColumnRefDate() { return xdfgetTheLongAndWindingTableAndColumnRefDate(); }

    /**
     * Add order-by as ascend. <br>
     * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE: {NotNull, DATE(8)}
     * @return this. (NotNull)
     */
    public BsVendorTheLongAndWindingTableAndColumnRefCQ addOrderBy_TheLongAndWindingTableAndColumnRefDate_Asc() { regOBA("THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE"); return this; }

    /**
     * Add order-by as descend. <br>
     * THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE: {NotNull, DATE(8)}
     * @return this. (NotNull)
     */
    public BsVendorTheLongAndWindingTableAndColumnRefCQ addOrderBy_TheLongAndWindingTableAndColumnRefDate_Desc() { regOBD("THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE"); return this; }

    protected ConditionValue _shortDate;
    public ConditionValue xdfgetShortDate()
    { if (_shortDate == null) { _shortDate = nCV(); }
      return _shortDate; }
    protected ConditionValue xgetCValueShortDate() { return xdfgetShortDate(); }

    /**
     * Add order-by as ascend. <br>
     * SHORT_DATE: {NotNull, DATE(8)}
     * @return this. (NotNull)
     */
    public BsVendorTheLongAndWindingTableAndColumnRefCQ addOrderBy_ShortDate_Asc() { regOBA("SHORT_DATE"); return this; }

    /**
     * Add order-by as descend. <br>
     * SHORT_DATE: {NotNull, DATE(8)}
     * @return this. (NotNull)
     */
    public BsVendorTheLongAndWindingTableAndColumnRefCQ addOrderBy_ShortDate_Desc() { regOBD("SHORT_DATE"); return this; }

    // ===================================================================================
    // SpecifiedDerivedOrderBy
    // =======================
    /**
     * Add order-by for specified derived column as ascend.
     * <pre>
     * cb.specify().derivedPurchaseList().max(new SubQuery&lt;PurchaseCB&gt;() {
     *     public void query(PurchaseCB subCB) {
     *         subCB.specify().columnPurchaseDatetime();
     *     }
     * }, <span style="color: #CC4747">aliasName</span>);
     * <span style="color: #3F7E5E">// order by [alias-name] asc</span>
     * cb.<span style="color: #CC4747">addSpecifiedDerivedOrderBy_Asc</span>(<span style="color: #CC4747">aliasName</span>);
     * </pre>
     * @param aliasName The alias name specified at (Specify)DerivedReferrer. (NotNull)
     * @return this. (NotNull)
     */
    public BsVendorTheLongAndWindingTableAndColumnRefCQ addSpecifiedDerivedOrderBy_Asc(String aliasName) { registerSpecifiedDerivedOrderBy_Asc(aliasName); return this; }

    /**
     * Add order-by for specified derived column as descend.
     * <pre>
     * cb.specify().derivedPurchaseList().max(new SubQuery&lt;PurchaseCB&gt;() {
     *     public void query(PurchaseCB subCB) {
     *         subCB.specify().columnPurchaseDatetime();
     *     }
     * }, <span style="color: #CC4747">aliasName</span>);
     * <span style="color: #3F7E5E">// order by [alias-name] desc</span>
     * cb.<span style="color: #CC4747">addSpecifiedDerivedOrderBy_Desc</span>(<span style="color: #CC4747">aliasName</span>);
     * </pre>
     * @param aliasName The alias name specified at (Specify)DerivedReferrer. (NotNull)
     * @return this. (NotNull)
     */
    public BsVendorTheLongAndWindingTableAndColumnRefCQ addSpecifiedDerivedOrderBy_Desc(String aliasName) { registerSpecifiedDerivedOrderBy_Desc(aliasName); return this; }

    // ===================================================================================
    // Union Query
    // ===========
    // Mirrors relation joins set up on the base query onto the union query so
    // both sides of the UNION resolve the same outer joins.
    public void reflectRelationOnUnionQuery(ConditionQuery bqs, ConditionQuery uqs) {
        VendorTheLongAndWindingTableAndColumnRefCQ bq = (VendorTheLongAndWindingTableAndColumnRefCQ)bqs;
        VendorTheLongAndWindingTableAndColumnRefCQ uq = (VendorTheLongAndWindingTableAndColumnRefCQ)uqs;
        if (bq.hasConditionQueryVendorTheLongAndWindingTableAndColumn()) {
            uq.queryVendorTheLongAndWindingTableAndColumn().reflectRelationOnUnionQuery(bq.queryVendorTheLongAndWindingTableAndColumn(), uq.queryVendorTheLongAndWindingTableAndColumn());
        }
    }

    // ===================================================================================
    // Foreign Query
    // =============
    /**
     * Get the condition-query for relation table. <br>
     * VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN by my THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID, named 'vendorTheLongAndWindingTableAndColumn'.
     * @return The instance of condition-query. (NotNull)
     */
    public VendorTheLongAndWindingTableAndColumnCQ queryVendorTheLongAndWindingTableAndColumn() {
        return xdfgetConditionQueryVendorTheLongAndWindingTableAndColumn();
    }

    // Lazily registers the relation query and its outer join on first access.
    public VendorTheLongAndWindingTableAndColumnCQ xdfgetConditionQueryVendorTheLongAndWindingTableAndColumn() {
        String prop = "vendorTheLongAndWindingTableAndColumn";
        if (!xhasQueRlMap(prop)) { xregQueRl(prop, xcreateQueryVendorTheLongAndWindingTableAndColumn()); xsetupOuterJoinVendorTheLongAndWindingTableAndColumn(); }
        return xgetQueRlMap(prop);
    }

    protected VendorTheLongAndWindingTableAndColumnCQ xcreateQueryVendorTheLongAndWindingTableAndColumn() {
        String nrp = xresolveNRP("VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF", "vendorTheLongAndWindingTableAndColumn"); String jan = xresolveJAN(nrp, xgetNNLvl());
        return xinitRelCQ(new VendorTheLongAndWindingTableAndColumnCQ(this, xgetSqlClause(), jan, xgetNNLvl()), _baseCB, "vendorTheLongAndWindingTableAndColumn", nrp);
    }

    protected void xsetupOuterJoinVendorTheLongAndWindingTableAndColumn() { xregOutJo("vendorTheLongAndWindingTableAndColumn"); }

    public boolean hasConditionQueryVendorTheLongAndWindingTableAndColumn() { return xhasQueRlMap("vendorTheLongAndWindingTableAndColumn"); }

    // No fixed condition on this relation, so there are no dynamic parameters.
    protected Map<String, Object> xfindFixedConditionDynamicParameterMap(String property) {
        return null;
    }

    // ===================================================================================
    // ScalarCondition
    // ===============
    public Map<String, VendorTheLongAndWindingTableAndColumnRefCQ> xdfgetScalarCondition() { return xgetSQueMap("scalarCondition"); }
    public String keepScalarCondition(VendorTheLongAndWindingTableAndColumnRefCQ sq) { return xkeepSQue("scalarCondition", sq); }

    // ===================================================================================
    // MyselfDerived
    // =============
    public Map<String, VendorTheLongAndWindingTableAndColumnRefCQ> xdfgetSpecifyMyselfDerived() { return xgetSQueMap("specifyMyselfDerived"); }
    public String keepSpecifyMyselfDerived(VendorTheLongAndWindingTableAndColumnRefCQ sq) { return xkeepSQue("specifyMyselfDerived", sq); }
    public Map<String, VendorTheLongAndWindingTableAndColumnRefCQ> xdfgetQueryMyselfDerived() { return xgetSQueMap("queryMyselfDerived"); }
    public String keepQueryMyselfDerived(VendorTheLongAndWindingTableAndColumnRefCQ sq) { return xkeepSQue("queryMyselfDerived", sq); }
    public Map<String, Object> xdfgetQueryMyselfDerivedParameter() { return xgetSQuePmMap("queryMyselfDerived"); }
    public String keepQueryMyselfDerivedParameter(Object pm) { return xkeepSQuePm("queryMyselfDerived", pm); }

    // ===================================================================================
    // MyselfExists
    // ============
    // NOTE(review): _myselfExistsMap appears unused in this class - the
    // accessors below delegate to xgetSQueMap/xkeepSQue instead. Likely a
    // generator artifact; confirm before removing.
    protected Map<String, VendorTheLongAndWindingTableAndColumnRefCQ> _myselfExistsMap;
    public Map<String, VendorTheLongAndWindingTableAndColumnRefCQ> xdfgetMyselfExists() { return xgetSQueMap("myselfExists"); }
    public String keepMyselfExists(VendorTheLongAndWindingTableAndColumnRefCQ sq) { return xkeepSQue("myselfExists", sq); }

    // ===================================================================================
    // MyselfInScope
    // =============
    public Map<String, VendorTheLongAndWindingTableAndColumnRefCQ> xdfgetMyselfInScope() { return xgetSQueMap("myselfInScope"); }
    public String keepMyselfInScope(VendorTheLongAndWindingTableAndColumnRefCQ sq) { return xkeepSQue("myselfInScope", sq); }

    // ===================================================================================
    // Very Internal
    // =============
    // very internal (for suppressing warn about 'Not Use Import')
    protected String xCB() { return VendorTheLongAndWindingTableAndColumnRefCB.class.getName(); }
    protected String xCQ() { return VendorTheLongAndWindingTableAndColumnRefCQ.class.getName(); }
    protected String xCHp() { return HpQDRFunction.class.getName(); }
    protected String xCOp() { return ConditionOption.class.getName(); }
    protected String xMap() { return Map.class.getName(); }
}
| |
/*
* Copyright 2010 BigData.mx
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package mx.bigdata.sat.cfd;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.math.BigInteger;
import java.security.PrivateKey;
import java.security.Signature;
import java.security.SignatureException;
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Map;
import javax.xml.XMLConstants;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
import javax.xml.bind.util.JAXBSource;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.Result;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
import mx.bigdata.sat.cfd.schema.Comprobante;
import mx.bigdata.sat.common.URIResolverImpl;
import mx.bigdata.sat.common.NamespacePrefixMapperImpl;
import mx.bigdata.sat.security.KeyLoaderEnumeration;
import mx.bigdata.sat.security.factory.KeyLoaderFactory;
import org.apache.commons.codec.binary.Base64;
import org.w3c.dom.Document;
import org.xml.sax.ErrorHandler;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import javax.xml.transform.Source;
/**
 * Reads, signs ("sella"), verifies, validates and serializes CFD v2
 * (Comprobante Fiscal Digital) invoices.
 *
 * <p>The "cadena original" is produced by applying the canonical XSLT to the
 * JAXB document. Per the SAT rules visible in this class, signatures use
 * MD5withRSA for invoices dated before 2011 and SHA1withRSA from 2011 on.
 */
public final class CFDv2 implements CFD2 {

    private static final String XSLT = "/xslt/cadenaoriginal_2_0.xslt";

    // Main CFD v2 schema plus the complemento schemas validated alongside it.
    private static final String[] XSD = new String[] {
        "/xsd/v2/cfdv2.xsd",
        "/xsd/common/detallista/detallista.xsd",
        "/xsd/common/divisas/divisas.xsd",
        "/xsd/common/donat/v10/donat.xsd",
        "/xsd/common/ecb/ecb.xsd",
        "/xsd/common/ecc/ecc.xsd",
        "/xsd/common/iedu/iedu.xsd",
        "/xsd/common/implocal/implocal.xsd",
        "/xsd/common/leyendasFisc/leyendasFisc.xsd",
        "/xsd/common/pfic/pfic.xsd",
        "/xsd/common/psgcfdsp/psgcfdsp.xsd",
        "/xsd/common/psgecfd/psgecfd.xsd",
        "/xsd/common/ventavehiculos/v10/ventavehiculos.xsd"
    };

    private static final String XML_HEADER =
        "<?xml version=\"1.0\" encoding=\"UTF-8\"?>";

    private static final String BASE_CONTEXT = "mx.bigdata.sat.cfd.schema";

    private static final Joiner JOINER = Joiner.on(':');

    private final JAXBContext context;

    // Optional user-supplied factory; a default is created lazily in newTransformer().
    private TransformerFactory tf;

    public static final ImmutableMap<String, String> PREFIXES =
        ImmutableMap.of("http://www.w3.org/2001/XMLSchema-instance", "xsi",
                        "http://www.sat.gob.mx/cfd/2", "");

    private final Map<String, String> localPrefixes = Maps.newHashMap(PREFIXES);

    final Comprobante document;

    /**
     * Loads a CFD v2 document from an XML stream.
     *
     * @param in       the XML input (closed after loading)
     * @param contexts extra JAXB context packages for complementos
     */
    public CFDv2(InputStream in, String... contexts) throws Exception {
        this.context = getContext(contexts);
        this.document = load(in);
    }

    /**
     * Wraps an in-memory Comprobante; a defensive deep copy is stored.
     *
     * @param comprobante the document to wrap
     * @param contexts    extra JAXB context packages for complementos
     */
    public CFDv2(Comprobante comprobante, String... contexts) throws Exception {
        this.context = getContext(contexts);
        this.document = copy(comprobante);
    }

    /** Registers an additional namespace prefix used when marshalling. */
    public void addNamespace(String uri, String prefix) {
        localPrefixes.put(uri, prefix);
    }

    /** Installs a custom transformer factory (wired to the bundled URI resolver). */
    public void setTransformerFactory(TransformerFactory tf) {
        this.tf = tf;
        tf.setURIResolver(new URIResolverImpl());
    }

    /**
     * Signs ("sella") the document: computes the signature over the cadena
     * original, and stores the signature, certificate and serial number.
     *
     * @param key  the private key matching {@code cert}
     * @param cert the signing certificate (must be currently valid)
     */
    public void sellar(PrivateKey key, X509Certificate cert) throws Exception {
        cert.checkValidity();
        document.setSello(getSignature(key));
        Base64 b64 = new Base64(-1); // -1: no line wrapping in the output
        document.setCertificado(b64.encodeToString(cert.getEncoded()));
        BigInteger bi = cert.getSerialNumber();
        // The CFD serial number is the certificate serial's raw bytes read as
        // text (platform charset; the SAT serials are plain ASCII digits).
        document.setNoCertificado(new String(bi.toByteArray()));
    }

    /** Signs the document and returns a deep copy of the signed Comprobante. */
    public Comprobante sellarComprobante(PrivateKey key, X509Certificate cert)
        throws Exception {
        sellar(key, cert);
        return getComprobante();
    }

    /** Validates the document against the CFD v2 schemas, throwing on error. */
    public void validar() throws Exception {
        validar(null);
    }

    /**
     * Validates the document against the CFD v2 schemas.
     *
     * @param handler optional error handler; when null, validation errors throw
     */
    public void validar(ErrorHandler handler) throws Exception {
        SchemaFactory sf =
            SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
        Source[] schemas = new Source[XSD.length];
        for (int i = 0; i < XSD.length; i++) {
            schemas[i] = new StreamSource(getClass().getResourceAsStream(XSD[i]));
        }
        Schema schema = sf.newSchema(schemas);
        Validator validator = schema.newValidator();
        if (handler != null) {
            validator.setErrorHandler(handler);
        }
        validator.validate(new JAXBSource(context, document));
    }

    /**
     * Verifies the document's signature using the certificate embedded in the
     * document itself.
     */
    public void verificar() throws Exception {
        verificar(extractCertificate());
    }

    /**
     * Verifies the signature against the cadena original derived from the
     * given raw XML stream (rather than from the in-memory document).
     *
     * @param in the original XML (closed by getOriginalBytes)
     */
    public void verificar(InputStream in) throws Exception {
        X509Certificate cert = extractCertificate();
        byte[] signature = new Base64().decode(document.getSello());
        verifySignature(cert, getOriginalBytes(in), signature, "Sellado invalido.");
    }

    /**
     * Verifies the signature over the in-memory document's cadena original
     * with an externally supplied certificate.
     */
    public void verificar(Certificate cert) throws Exception {
        byte[] signature = new Base64().decode(document.getSello());
        verifySignature(cert, getOriginalBytes(), signature, "Invalid signature");
    }

    // Decodes the certificate embedded in the document's 'certificado' attribute.
    private X509Certificate extractCertificate() throws Exception {
        byte[] cbs = new Base64().decode(document.getCertificado());
        return KeyLoaderFactory.createInstance(
            KeyLoaderEnumeration.PUBLIC_KEY_LOADER,
            new ByteArrayInputStream(cbs)
        ).getKey();
    }

    // Shared verification: pre-2011 documents are tried with MD5withRSA first,
    // falling back to SHA1withRSA. FIX: the original ignored the boolean result
    // of the MD5 verify, so a well-formed-but-wrong MD5 signature was silently
    // accepted; the result is now honored.
    private void verifySignature(Certificate cert, byte[] bytes, byte[] signature,
                                 String errorMessage) throws Exception {
        boolean md5 = false;
        if (getYear() < 2011) {
            Signature sig = Signature.getInstance("MD5withRSA");
            sig.initVerify(cert);
            sig.update(bytes);
            try {
                md5 = sig.verify(signature);
            } catch (SignatureException e) {
                md5 = false; // not an MD5 signature - fall through to SHA1
            }
        }
        if (!md5) {
            Signature sig = Signature.getInstance("SHA1withRSA");
            sig.initVerify(cert);
            sig.update(bytes);
            if (!sig.verify(signature)) {
                throw new Exception(errorMessage);
            }
        }
    }

    /**
     * Serializes the document as formatted XML (with the XML header and the
     * SAT schema location) to the given stream.
     */
    public void guardar(OutputStream out) throws Exception {
        Marshaller m = context.createMarshaller();
        m.setProperty("com.sun.xml.bind.namespacePrefixMapper",
                      new NamespacePrefixMapperImpl(localPrefixes));
        m.setProperty(Marshaller.JAXB_FRAGMENT, Boolean.TRUE);
        m.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
        m.setProperty(Marshaller.JAXB_SCHEMA_LOCATION,
                      "http://www.sat.gob.mx/cfd/2 "
                      + "http://www.sat.gob.mx/sitio_internet/cfd/2/cfdv2.xsd");
        out.write(XML_HEADER.getBytes("UTF8"));
        m.marshal(document, out);
    }

    /** Returns the cadena original of the in-memory document as UTF-8 text. */
    public String getCadenaOriginal() throws Exception {
        return new String(getOriginalBytes(), "UTF8");
    }

    /** Parses a Comprobante from an XML stream (stream is closed). */
    public static Comprobante newComprobante(InputStream in) throws Exception {
        return load(in);
    }

    // Cadena original computed from a raw XML stream; closes the stream.
    byte[] getOriginalBytes(InputStream in) throws Exception {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try {
            // FIX: the original created an unused StreamSource for the XSLT here
            // and opened the resource a second time inside newTransformer.
            newTransformer().transform(new StreamSource(in), new StreamResult(baos));
        } finally {
            in.close();
        }
        return baos.toByteArray();
    }

    // Cadena original computed from the in-memory JAXB document.
    byte[] getOriginalBytes() throws Exception {
        JAXBSource in = new JAXBSource(context, document);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        newTransformer().transform(in, new StreamResult(baos));
        return baos.toByteArray();
    }

    // Builds a transformer for the cadena-original XSLT, using the configured
    // factory or a default wired to the bundled URI resolver.
    private Transformer newTransformer() throws Exception {
        TransformerFactory factory = tf;
        if (factory == null) {
            factory = TransformerFactory.newInstance();
            factory.setURIResolver(new URIResolverImpl());
        }
        return factory.newTransformer(
            new StreamSource(getClass().getResourceAsStream(XSLT)));
    }

    // Signs the cadena original and returns the Base64 signature (no wrapping).
    String getSignature(PrivateKey key) throws Exception {
        Signature sig = Signature.getInstance(getDigestAlgorithm());
        sig.initSign(key);
        sig.update(getOriginalBytes());
        byte[] signed = sig.sign();
        return new Base64(-1).encodeToString(signed);
    }

    private Comprobante getComprobante() throws Exception {
        return copy(document);
    }

    // Defensive deep-copy via marshal/unmarshal round trip through a DOM.
    private Comprobante copy(Comprobante comprobante) throws Exception {
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        dbf.setNamespaceAware(true);
        DocumentBuilder db = dbf.newDocumentBuilder();
        Document doc = db.newDocument();
        Marshaller m = context.createMarshaller();
        m.marshal(comprobante, doc);
        Unmarshaller u = context.createUnmarshaller();
        return (Comprobante) u.unmarshal(doc);
    }

    // 2011+ invoices are signed with SHA1withRSA; earlier ones with MD5withRSA.
    private String getDigestAlgorithm() {
        return (getYear() > 2010) ? "SHA1withRSA" : "MD5withRSA";
    }

    private int getYear() {
        Date date = document.getFecha();
        Calendar c = Calendar.getInstance();
        c.setTime(date);
        return c.get(Calendar.YEAR);
    }

    private static JAXBContext getContext(String[] contexts) throws Exception {
        List<String> ctx = Lists.asList(BASE_CONTEXT, contexts);
        return JAXBContext.newInstance(JOINER.join(ctx));
    }

    // Unmarshals a Comprobante and always closes the stream.
    private static Comprobante load(InputStream in, String... contexts)
        throws Exception {
        JAXBContext context = getContext(contexts);
        try {
            Unmarshaller u = context.createUnmarshaller();
            return (Comprobante) u.unmarshal(in);
        } finally {
            in.close();
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.geo;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.LineString;
import org.locationtech.jts.geom.LinearRing;
import org.locationtech.jts.geom.MultiLineString;
import org.locationtech.jts.geom.Point;
import org.locationtech.jts.geom.Polygon;
import org.apache.lucene.geo.GeoTestUtil;
import org.apache.lucene.geo.Line;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.geo.builders.CoordinatesBuilder;
import org.elasticsearch.common.geo.builders.EnvelopeBuilder;
import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder;
import org.elasticsearch.common.geo.builders.LineStringBuilder;
import org.elasticsearch.common.geo.builders.MultiLineStringBuilder;
import org.elasticsearch.common.geo.builders.MultiPointBuilder;
import org.elasticsearch.common.geo.builders.MultiPolygonBuilder;
import org.elasticsearch.common.geo.builders.PointBuilder;
import org.elasticsearch.common.geo.builders.PolygonBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.geo.parsers.GeoWKTParser;
import org.elasticsearch.common.geo.parsers.ShapeParser;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
import org.elasticsearch.index.mapper.LegacyGeoShapeFieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.test.geo.RandomShapeGenerator;
import org.locationtech.spatial4j.exception.InvalidShapeException;
import org.locationtech.spatial4j.shape.Rectangle;
import org.locationtech.spatial4j.shape.Shape;
import org.locationtech.spatial4j.shape.ShapeCollection;
import org.locationtech.spatial4j.shape.jts.JtsPoint;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.elasticsearch.common.geo.builders.ShapeBuilder.SPATIAL_CONTEXT;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasToString;
/**
* Tests for {@code GeoWKTShapeParser}
*/
public class GeoWKTShapeParserTests extends BaseGeoParsingTestCase {
// Serializes the shape builder to WKT inside a JSON value, optionally
// corrupting it with a trailing paren, and (half the time) prefixing a
// commented-out copy to exercise the parser's comment handling.
private static XContentBuilder toWKTContent(ShapeBuilder builder, boolean generateMalformed)
    throws IOException {
    String wkt = builder.toWKT();
    if (generateMalformed) {
        // malformed - extra paren
        // TODO generate more malformed WKT
        wkt = wkt + GeoWKTParser.RPAREN;
    }
    if (randomBoolean()) {
        // test comments
        wkt = "# " + wkt + "\n" + wkt;
    }
    return XContentFactory.jsonBuilder().value(wkt);
}
// Round-trips a well-formed WKT rendering of the builder and checks the
// parsed geometry against the expectation (JTS- or Lucene-backed).
private void assertExpected(Object expected, ShapeBuilder<?, ?> builder, boolean useJTS) throws IOException {
    assertGeometryEquals(expected, toWKTContent(builder, false), useJTS);
}
// A deliberately corrupted WKT rendering must fail with ElasticsearchParseException.
private void assertMalformed(ShapeBuilder builder) throws IOException {
    assertValidException(toWKTContent(builder, true), ElasticsearchParseException.class);
}
@Override
public void testParsePoint() throws IOException {
    GeoPoint randomPt = RandomShapeGenerator.randomPoint(random());
    Coordinate coord = new Coordinate(randomPt.lon(), randomPt.lat());
    // JTS-backed parsing produces a JtsPoint wrapping the factory point.
    Point jts = GEOMETRY_FACTORY.createPoint(coord);
    assertExpected(new JtsPoint(jts, SPATIAL_CONTEXT), new PointBuilder().coordinate(coord), true);
    // Lucene-backed parsing produces a plain GeoPoint.
    assertExpected(new GeoPoint(randomPt.lat(), randomPt.lon()), new PointBuilder().coordinate(coord), false);
    assertMalformed(new PointBuilder().coordinate(coord));
}
@Override
public void testParseMultiPoint() throws IOException {
    final int count = randomIntBetween(2, 100);
    List<Coordinate> coords = new ArrayList<>(count);
    for (int i = 0; i < count; ++i) {
        coords.add(new Coordinate(GeoTestUtil.nextLongitude(), GeoTestUtil.nextLatitude()));
    }
    // Spatial4j expectation: one point shape per coordinate.
    Shape[] points = new Shape[count];
    for (int i = 0; i < count; ++i) {
        Coordinate c = coords.get(i);
        points[i] = SPATIAL_CONTEXT.makePoint(c.x, c.y);
    }
    assertExpected(shapeCollection(points), new MultiPointBuilder(coords), true);
    // Lucene expectation: raw [lon, lat] pairs.
    double[][] raw = new double[count][];
    for (int i = 0; i < count; ++i) {
        Coordinate c = coords.get(i);
        raw[i] = new double[] { c.x, c.y };
    }
    assertExpected(raw, new MultiPointBuilder(coords), false);
    assertMalformed(new MultiPointBuilder(coords));
}
// Produces 2..100 random coordinates, each drawn from the [-90, 90] box on
// both axes (presumably to keep longitudes clear of the dateline - confirm).
private List<Coordinate> randomLineStringCoords() {
    int count = randomIntBetween(2, 100);
    List<Coordinate> coords = new ArrayList<>(count);
    for (int i = 0; i < count; ++i) {
        GeoPoint pt = RandomShapeGenerator.randomPointIn(random(), -90d, -90d, 90d, 90d);
        coords.add(new Coordinate(pt.lon(), pt.lat()));
    }
    return coords;
}
@Override
public void testParseLineString() throws IOException {
    List<Coordinate> coords = randomLineStringCoords();
    // JTS expectation built from the same coordinates.
    Coordinate[] arr = coords.toArray(new Coordinate[coords.size()]);
    assertExpected(jtsGeom(GEOMETRY_FACTORY.createLineString(arr)), new LineStringBuilder(coords), true);
    // Lucene expectation: parallel lat/lon arrays.
    int n = coords.size();
    double[] lats = new double[n];
    double[] lons = new double[n];
    for (int i = 0; i < n; ++i) {
        Coordinate c = coords.get(i);
        lats[i] = c.y;
        lons[i] = c.x;
    }
    assertExpected(new Line(lats, lons), new LineStringBuilder(coords), false);
}
@Override
public void testParseMultiLineString() throws IOException {
    int count = randomIntBetween(2, 8);
    List<LineString> jtsLines = new ArrayList<>(count);
    MultiLineStringBuilder builder = new MultiLineStringBuilder();
    for (int i = 0; i < count; ++i) {
        List<Coordinate> coords = randomLineStringCoords();
        jtsLines.add(GEOMETRY_FACTORY.createLineString(coords.toArray(new Coordinate[coords.size()])));
        builder.linestring(new LineStringBuilder(coords));
    }
    // JTS expectation: a MultiLineString over the same component lines.
    MultiLineString jtsExpected = GEOMETRY_FACTORY.createMultiLineString(
        jtsLines.toArray(new LineString[jtsLines.size()]));
    assertExpected(jtsGeom(jtsExpected), builder, true);
    // Lucene expectation: one Line (parallel lat/lon arrays) per linestring.
    Line[] luceneLines = new Line[jtsLines.size()];
    for (int i = 0; i < jtsLines.size(); ++i) {
        Coordinate[] cs = jtsLines.get(i).getCoordinates();
        double[] lats = new double[cs.length];
        double[] lons = new double[cs.length];
        for (int j = 0; j < cs.length; ++j) {
            lats[j] = cs[j].y;
            lons[j] = cs[j].x;
        }
        luceneLines[i] = new Line(lats, lons);
    }
    assertExpected(luceneLines, builder, false);
    assertMalformed(builder);
}
@Override
public void testParsePolygon() throws IOException {
    // Generate a random polygon, check its WKT round-trips through the
    // JTS shape representation, and check malformed WKT is rejected.
    final PolygonBuilder builder =
            (PolygonBuilder) RandomShapeGenerator.createShape(random(), RandomShapeGenerator.ShapeType.POLYGON);
    final Coordinate[] shellCoords = builder.coordinates()[0][0];
    final LinearRing shell = GEOMETRY_FACTORY.createLinearRing(shellCoords);
    final Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, null); // no holes
    assertExpected(jtsGeom(expected), builder, true);
    assertMalformed(builder);
}
@Override
public void testParseMultiPolygon() throws IOException {
    // Build 2..8 random hole-free polygons into a MultiPolygonBuilder,
    // verify the parsed shape collection matches, and check that a
    // malformed variant is rejected.
    final int count = randomIntBetween(2, 8);
    final MultiPolygonBuilder builder = new MultiPolygonBuilder();
    final Polygon[] shapes = new Polygon[count];
    for (int i = 0; i < count; ++i) {
        final PolygonBuilder pb =
                (PolygonBuilder) RandomShapeGenerator.createShape(random(), RandomShapeGenerator.ShapeType.POLYGON);
        builder.polygon(pb);
        final LinearRing shell = GEOMETRY_FACTORY.createLinearRing(pb.coordinates()[0][0]);
        shapes[i] = GEOMETRY_FACTORY.createPolygon(shell, null);
    }
    final Shape expected = shapeCollection(shapes);
    assertExpected(expected, builder, true);
    assertMalformed(builder);
}
public void testParsePolygonWithHole() throws IOException {
    // A plain 2D polygon with one interior ring: verify that both the JTS
    // (legacy) and Lucene parse paths produce the shell-plus-hole geometry,
    // and that a malformed variant is rejected.
    // Shell: unit square from (100, 0) to (101, 1), explicitly closed.
    List<Coordinate> shellCoordinates = new ArrayList<>();
    shellCoordinates.add(new Coordinate(100, 0));
    shellCoordinates.add(new Coordinate(101, 0));
    shellCoordinates.add(new Coordinate(101, 1));
    shellCoordinates.add(new Coordinate(100, 1));
    shellCoordinates.add(new Coordinate(100, 0));
    // Hole: smaller square strictly inside the shell, explicitly closed.
    List<Coordinate> holeCoordinates = new ArrayList<>();
    holeCoordinates.add(new Coordinate(100.2, 0.2));
    holeCoordinates.add(new Coordinate(100.8, 0.2));
    holeCoordinates.add(new Coordinate(100.8, 0.8));
    holeCoordinates.add(new Coordinate(100.2, 0.8));
    holeCoordinates.add(new Coordinate(100.2, 0.2));
    PolygonBuilder polygonWithHole = new PolygonBuilder(new CoordinatesBuilder().coordinates(shellCoordinates));
    polygonWithHole.hole(new LineStringBuilder(holeCoordinates));
    LinearRing shell = GEOMETRY_FACTORY.createLinearRing(
            shellCoordinates.toArray(new Coordinate[shellCoordinates.size()]));
    LinearRing[] holes = new LinearRing[1];
    holes[0] = GEOMETRY_FACTORY.createLinearRing(
            holeCoordinates.toArray(new Coordinate[holeCoordinates.size()]));
    Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, holes);
    assertExpected(jtsGeom(expected), polygonWithHole, true);
    // Lucene polygons take parallel (lats[], lons[]) arrays; the hole ring is
    // passed to the outer polygon's constructor rather than kept separate.
    org.apache.lucene.geo.Polygon hole =
            new org.apache.lucene.geo.Polygon(
                    new double[] {0.8d, 0.8d, 0.2d, 0.2d, 0.8d}, new double[] {100.2d, 100.8d, 100.8d, 100.2d, 100.2d});
    org.apache.lucene.geo.Polygon p =
            new org.apache.lucene.geo.Polygon(
                    new double[] {0d, 1d, 1d, 0d, 0d}, new double[] {101d, 101d, 100d, 100d, 101d}, hole);
    assertExpected(p, polygonWithHole, false);
    assertMalformed(polygonWithHole);
}
public void testParseMixedDimensionPolyWithHole() throws IOException {
    // Shell is purely 2D.
    List<Coordinate> shellCoordinates = new ArrayList<>();
    shellCoordinates.add(new Coordinate(100, 0));
    shellCoordinates.add(new Coordinate(101, 0));
    shellCoordinates.add(new Coordinate(101, 1));
    shellCoordinates.add(new Coordinate(100, 1));
    shellCoordinates.add(new Coordinate(100, 0));
    // add 3d point to test ISSUE #10501 -- the hole mixes 2D and 3D points.
    List<Coordinate> holeCoordinates = new ArrayList<>();
    holeCoordinates.add(new Coordinate(100.2, 0.2, 15.0));
    holeCoordinates.add(new Coordinate(100.8, 0.2));
    holeCoordinates.add(new Coordinate(100.8, 0.8));
    holeCoordinates.add(new Coordinate(100.2, 0.8, 10.0));
    holeCoordinates.add(new Coordinate(100.2, 0.2));
    PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder().coordinates(shellCoordinates));
    builder.hole(new LineStringBuilder(holeCoordinates));
    // Serialize the builder to WKT and re-parse it through a mapper.
    XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().value(builder.toWKT());
    XContentParser parser = createParser(xContentBuilder);
    parser.nextToken();
    Settings indexSettings = Settings.builder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()).build();
    Mapper.BuilderContext mockBuilderContext = new Mapper.BuilderContext(indexSettings, new ContentPath());
    // ignore_z_value=false: z coordinates are not accepted at all.
    final GeoShapeFieldMapper mapperBuilder =
            (GeoShapeFieldMapper) (new GeoShapeFieldMapper.Builder("test").ignoreZValue(false).build(mockBuilderContext));
    // test store z disabled: parsing the 3D hole points must fail.
    ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class,
            () -> ShapeParser.parse(parser, mapperBuilder));
    assertThat(e, hasToString(containsString("but [ignore_z_value] parameter is [false]")));
}
public void testParseMixedDimensionPolyWithHoleStoredZ() throws IOException {
    // Shell is purely 2D.
    List<Coordinate> shellCoordinates = new ArrayList<>();
    shellCoordinates.add(new Coordinate(100, 0));
    shellCoordinates.add(new Coordinate(101, 0));
    shellCoordinates.add(new Coordinate(101, 1));
    shellCoordinates.add(new Coordinate(100, 1));
    shellCoordinates.add(new Coordinate(100, 0));
    // add 3d point to test ISSUE #10501 -- the hole mixes 2D and 3D points.
    List<Coordinate> holeCoordinates = new ArrayList<>();
    holeCoordinates.add(new Coordinate(100.2, 0.2, 15.0));
    holeCoordinates.add(new Coordinate(100.8, 0.2));
    holeCoordinates.add(new Coordinate(100.8, 0.8));
    holeCoordinates.add(new Coordinate(100.2, 0.8, 10.0));
    holeCoordinates.add(new Coordinate(100.2, 0.2));
    PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder().coordinates(shellCoordinates));
    builder.hole(new LineStringBuilder(holeCoordinates));
    XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().value(builder.toWKT());
    XContentParser parser = createParser(xContentBuilder);
    parser.nextToken();
    // Index created on 6.3 so the legacy (prefix-tree) geo_shape mapper applies.
    Settings indexSettings = Settings.builder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_6_3_0)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()).build();
    Mapper.BuilderContext mockBuilderContext = new Mapper.BuilderContext(indexSettings, new ContentPath());
    final LegacyGeoShapeFieldMapper mapperBuilder =
            (LegacyGeoShapeFieldMapper)(new LegacyGeoShapeFieldMapper.Builder("test").ignoreZValue(true).build(mockBuilderContext));
    // Here z values ARE accepted (ignore_z_value=true); the failure comes from
    // the hole mixing 2D and 3D coordinates within one ring.
    ElasticsearchException e = expectThrows(ElasticsearchException.class,
            () -> ShapeParser.parse(parser, mapperBuilder));
    assertThat(e, hasToString(containsString("unable to add coordinate to CoordinateBuilder: coordinate dimensions do not match")));
}
public void testParsePolyWithStoredZ() throws IOException {
    // A closed shell whose points all carry a z value parses successfully
    // when the mapper is configured with ignore_z_value=true, and the
    // resulting builder reports three dimensions.
    List<Coordinate> shellCoordinates = new ArrayList<>();
    shellCoordinates.add(new Coordinate(100, 0, 0));
    shellCoordinates.add(new Coordinate(101, 0, 0));
    shellCoordinates.add(new Coordinate(101, 1, 0));
    shellCoordinates.add(new Coordinate(100, 1, 5));
    shellCoordinates.add(new Coordinate(100, 0, 5));
    PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder().coordinates(shellCoordinates));
    XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().value(builder.toWKT());
    XContentParser parser = createParser(xContentBuilder);
    parser.nextToken();
    // Index created on 6.3 so the legacy (prefix-tree) geo_shape mapper applies.
    Settings indexSettings = Settings.builder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_6_3_0)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()).build();
    Mapper.BuilderContext mockBuilderContext = new Mapper.BuilderContext(indexSettings, new ContentPath());
    final LegacyGeoShapeFieldMapper mapperBuilder =
            (LegacyGeoShapeFieldMapper)(new LegacyGeoShapeFieldMapper.Builder("test").ignoreZValue(true).build(mockBuilderContext));
    ShapeBuilder shapeBuilder = ShapeParser.parse(parser, mapperBuilder);
    // assertEquals takes (expected, actual); the original had them swapped.
    assertEquals(3, shapeBuilder.numDimensions());
}
public void testParseOpenPolygon() throws IOException {
    // Shell is NOT closed (first point != last point).
    String openPolygon = "POLYGON ((100 5, 100 10, 90 10, 90 5))";
    XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().value(openPolygon);
    XContentParser parser = createParser(xContentBuilder);
    parser.nextToken();
    Settings indexSettings = Settings.builder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_6_3_0)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()).build();
    Mapper.BuilderContext mockBuilderContext = new Mapper.BuilderContext(indexSettings, new ContentPath());
    // coerce=false: an unclosed linear ring must be rejected.
    final LegacyGeoShapeFieldMapper defaultMapperBuilder =
            (LegacyGeoShapeFieldMapper)(new LegacyGeoShapeFieldMapper.Builder("test").coerce(false).build(mockBuilderContext));
    ElasticsearchParseException exception = expectThrows(ElasticsearchParseException.class,
            () -> ShapeParser.parse(parser, defaultMapperBuilder));
    assertEquals("invalid LinearRing found (coordinates are not closed)", exception.getMessage());
    // coerce=true: the parser closes the ring automatically (note the repeated
    // first point in the expected WKT below).
    // NOTE(review): the same parser instance is reused after the failed parse;
    // this relies on the failed parse not advancing the current token -- confirm.
    final LegacyGeoShapeFieldMapper coercingMapperBuilder =
            (LegacyGeoShapeFieldMapper)(new LegacyGeoShapeFieldMapper.Builder("test").coerce(true).build(mockBuilderContext));
    ShapeBuilder<?, ?> shapeBuilder = ShapeParser.parse(parser, coercingMapperBuilder);
    assertNotNull(shapeBuilder);
    assertEquals("polygon ((100.0 5.0, 100.0 10.0, 90.0 10.0, 90.0 5.0, 100.0 5.0))", shapeBuilder.toWKT());
}
public void testParseSelfCrossingPolygon() throws IOException {
    // A counter-clockwise polygon whose edges intersect each other (without
    // crossing the dateline) must be rejected with an InvalidShapeException.
    final List<Coordinate> ring = new ArrayList<>();
    ring.add(new Coordinate(176, 15));
    ring.add(new Coordinate(-177, 10));
    ring.add(new Coordinate(-177, -10));
    ring.add(new Coordinate(176, -15));
    ring.add(new Coordinate(-177, 15));
    ring.add(new Coordinate(172, 0));
    ring.add(new Coordinate(176, 15));
    final PolygonBuilder selfCrossing = new PolygonBuilder(new CoordinatesBuilder().coordinates(ring));
    final XContentBuilder wkt = XContentFactory.jsonBuilder().value(selfCrossing.toWKT());
    assertValidException(wkt, InvalidShapeException.class);
}
public void testMalformedWKT() throws IOException {
    // Comma-separated, individually parenthesized points are a common typo in
    // polygon WKT; the parser must reject them with ElasticsearchParseException.
    final String badPolygon = "POLYGON ((100, 5) (100, 10) (90, 10), (90, 5), (100, 5)";
    assertValidException(XContentFactory.jsonBuilder().value(badPolygon), ElasticsearchParseException.class);
}
@Override
public void testParseEnvelope() throws IOException {
    // An envelope is given upper-left then lower-right; verify both the
    // Spatial4j rectangle and the Lucene rectangle parse paths, plus the
    // malformed-input rejection.
    final org.apache.lucene.geo.Rectangle box = GeoTestUtil.nextBox();
    final EnvelopeBuilder builder =
            new EnvelopeBuilder(new Coordinate(box.minLon, box.maxLat), new Coordinate(box.maxLon, box.minLat));
    assertExpected(SPATIAL_CONTEXT.makeRectangle(box.minLon, box.maxLon, box.minLat, box.maxLat), builder, true);
    assertExpected(box, builder, false);
    assertMalformed(builder);
}
public void testInvalidGeometryType() throws IOException {
    // An unrecognized geometry keyword must be rejected outright.
    final XContentBuilder unknown = XContentFactory.jsonBuilder().value("UnknownType (-1 -2)");
    assertValidException(unknown, IllegalArgumentException.class);
}
@Override
public void testParseGeometryCollection() throws IOException {
    if (rarely()) {
        // assert empty shape collection: an empty GeometryCollectionBuilder
        // must build an empty result on both paths.
        GeometryCollectionBuilder builder = new GeometryCollectionBuilder();
        Shape[] expected = new Shape[0];
        if (randomBoolean()) {
            // Spatial4j path: an empty ShapeCollection.
            assertEquals(shapeCollection(expected).isEmpty(), builder.buildS4J().isEmpty());
        } else {
            // Lucene path: a zero-length geometry array.
            assertEquals(shapeCollection(expected).isEmpty(), ((Object[])builder.buildLucene()).length == 0);
        }
    } else {
        // Non-empty: a random collection must round-trip through both the
        // Spatial4j (S4J) and the Lucene build paths.
        GeometryCollectionBuilder gcb = RandomShapeGenerator.createGeometryCollection(random());
        assertExpected(gcb.buildS4J(), gcb, true);
        assertExpected(gcb.buildLucene(), gcb, false);
    }
}
public void testUnexpectedShapeException() throws IOException {
    // Asking the parser for a POLYGON while feeding it a POINT must fail
    // with a descriptive type-mismatch message.
    final XContentParser parser = createParser(toWKTContent(new PointBuilder(-1, 2), false));
    parser.nextToken();
    final ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class,
            () -> GeoWKTParser.parseExpectedType(parser, GeoShapeType.POLYGON));
    assertThat(e, hasToString(containsString("Expected geometry type [polygon] but found [point]")));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id: ImageLoaderRawCCITTFax.java 1177544 2011-09-30 07:58:51Z phancock $ */
package org.apache.xmlgraphics.image.loader.impl;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Map;
import javax.imageio.stream.ImageInputStream;
import javax.xml.transform.Source;
import lombok.extern.slf4j.Slf4j;
import org.apache.xmlgraphics.image.codec.tiff.TIFFDirectory;
import org.apache.xmlgraphics.image.codec.tiff.TIFFField;
import org.apache.xmlgraphics.image.codec.tiff.TIFFImage;
import org.apache.xmlgraphics.image.codec.tiff.TIFFImageDecoder;
import org.apache.xmlgraphics.image.codec.util.SeekableStream;
import org.apache.xmlgraphics.image.loader.Image;
import org.apache.xmlgraphics.image.loader.ImageException;
import org.apache.xmlgraphics.image.loader.ImageFlavor;
import org.apache.xmlgraphics.image.loader.ImageInfo;
import org.apache.xmlgraphics.image.loader.ImageSessionContext;
import org.apache.xmlgraphics.image.loader.util.ImageUtil;
import org.apache.xmlgraphics.image.loader.util.SeekableStreamAdapter;
import org.apache.xmlgraphics.util.MimeConstants;
import org.apache.xmlgraphics.util.io.SubInputStream;
/**
 * ImageLoader for CCITT group 3 and 4 images consumed "raw" (undecoded).
 * Provides a raw/undecoded stream.
 *
 * <p>Only single-strip TIFFs with CCITT 1-D, Group 3 or Group 4 compression
 * are supported; anything else results in an {@link ImageException}.</p>
 */
@Slf4j
public class ImageLoaderRawCCITTFax extends AbstractImageLoader implements
        JPEGConstants {

    // Values of the TIFF "Compression" field (tag 259); see the TIFF 6.0 spec.
    private static final int COMPRESSION_CCITT_1D = 2;
    private static final int COMPRESSION_FAX_GROUP3 = 3;
    private static final int COMPRESSION_FAX_GROUP4 = 4;

    /**
     * Main constructor.
     */
    public ImageLoaderRawCCITTFax() {
    }

    /** {@inheritDoc} */
    @Override
    public ImageFlavor getTargetFlavor() {
        return ImageFlavor.RAW_CCITTFAX;
    }

    /**
     * {@inheritDoc}
     *
     * <p>Scans the first TIFF IFD to determine the compression scheme, fill
     * order and strip layout, then returns an {@link ImageRawCCITTFax} that
     * exposes the single compressed strip without decoding it.</p>
     */
    @Override
    public Image loadImage(final ImageInfo info,
            final Map<Object, Object> hints, final ImageSessionContext session)
            throws ImageException, IOException {
        if (!MimeConstants.MIME_TIFF.equals(info.getMimeType())) {
            throw new IllegalArgumentException(
                    "ImageInfo must be from a image with MIME type: "
                            + MimeConstants.MIME_TIFF);
        }
        int fillOrder = 1; // TIFF FillOrder default: most significant bit first
        int compression = TIFFImage.COMP_NONE;
        long stripOffset;
        long stripLength;
        TIFFDirectory dir;
        final Source src = session.needSource(info.getOriginalURI());
        // NOTE(review): try-with-resources closes this ImageInputStream on
        // return, while the returned ImageRawCCITTFax still wraps a stream
        // obtained from the same Source -- confirm the sub-stream remains
        // usable after this close.
        try (final ImageInputStream in = ImageUtil.needImageInputStream(src)) {
            // Mark the stream so the directory scan can be undone below.
            in.mark();
            try {
                final SeekableStream seekable = new SeekableStreamAdapter(in);
                // Read the first (and only relevant) image file directory.
                dir = new TIFFDirectory(seekable, 0);
                TIFFField fld;
                fld = dir.getField(TIFFImageDecoder.TIFF_COMPRESSION);
                if (fld != null) {
                    compression = fld.getAsInt(0);
                    switch (compression) {
                    case COMPRESSION_CCITT_1D:
                    case COMPRESSION_FAX_GROUP4:
                        // Tag value passed through as-is.
                        break;
                    case COMPRESSION_FAX_GROUP3:
                        // Note: the TIFFImage compression constants seem to be
                        // a
                        // bit misleading!
                        compression = TIFFImage.COMP_FAX_G3_1D; // 1D is the
                        // default
                        // for Group3
                        fld = dir.getField(TIFFImageDecoder.TIFF_T4_OPTIONS);
                        if (fld != null) {
                            final long t4Options = fld.getAsLong(0);
                            // Bit 0 of T4Options set means 2-D coding.
                            if ((t4Options & 0x01) != 0) {
                                compression = TIFFImage.COMP_FAX_G3_2D; // "Abusing"
                                // for 2D
                                // signalling
                            }
                        }
                        break;
                    default:
                        log.debug("Unsupported compression " + compression);
                        seekable.close();
                        throw new ImageException(
                                "ImageLoader doesn't support TIFF compression: "
                                        + compression);
                    }
                }
                // Read information used for raw embedding
                fld = dir.getField(TIFFImageDecoder.TIFF_FILL_ORDER);
                if (fld != null) {
                    fillOrder = fld.getAsInt(0);
                }
                int stripCount;
                fld = dir.getField(TIFFImageDecoder.TIFF_ROWS_PER_STRIP);
                if (fld == null) {
                    // No RowsPerStrip field: the whole image is one strip.
                    stripCount = 1;
                } else {
                    // NOTE(review): integer division rounds down, so e.g.
                    // height=150, rowsPerStrip=100 yields 1 even though two
                    // strips exist -- confirm whether ceil was intended.
                    stripCount = (int) (info.getSize().getHeightPx() / fld
                            .getAsLong(0));
                }
                if (stripCount > 1) {
                    // Raw embedding requires one contiguous compressed run.
                    log.debug("More than one strip found in TIFF image.");
                    seekable.close();
                    throw new ImageException(
                            "ImageLoader doesn't support multiple strips");
                }
                stripOffset = dir.getField(TIFFImageDecoder.TIFF_STRIP_OFFSETS)
                        .getAsLong(0);
                stripLength = dir.getField(
                        TIFFImageDecoder.TIFF_STRIP_BYTE_COUNTS).getAsLong(0);
            } finally {
                // Undo the directory scan regardless of success or failure.
                in.reset();
            }
            // Position at the start of the (single) compressed strip.
            in.seek(stripOffset);
            // Expose exactly stripLength bytes of the source stream.
            InputStream subin = new SubInputStream(
                    ImageUtil.needInputStream(src), stripLength, true);
            if (fillOrder == 2) {
                // Decorate to flip bit order
                subin = new FillOrderChangeInputStream(subin);
            }
            final ImageRawCCITTFax rawImage = new ImageRawCCITTFax(info, subin,
                    compression);
            // Strip stream from source as we pass it on internally
            ImageUtil.removeStreams(src);
            return rawImage;
        }
    }

    /**
     * Stream decorator that reverses the bit order of every byte read, used
     * for TIFFs with FillOrder = 2 (least significant bit first).
     */
    private static class FillOrderChangeInputStream extends FilterInputStream {

        protected FillOrderChangeInputStream(final InputStream in) {
            super(in);
        }

        /** {@inheritDoc} */
        @Override
        public int read(final byte[] b, final int off, final int len)
                throws IOException {
            final int result = super.read(b, off, len);
            if (result > 0) {
                // Flip each byte that was actually read, in place.
                final int endpos = off + result;
                for (int i = off; i < endpos; ++i) {
                    b[i] = FLIP_TABLE[b[i] & 0xff];
                }
            }
            return result;
        }

        /** {@inheritDoc} */
        @Override
        public int read() throws IOException {
            final int b = super.read();
            if (b < 0) {
                // End of stream: pass through unchanged.
                return b;
            } else {
                return FLIP_TABLE[b] & 0xff;
            }
        }

        // Table to be used when fillOrder = 2, for flipping bytes.
        // Copied from XML Graphics Commons' TIFFFaxDecoder class
        // (FLIP_TABLE[i] is byte i with its 8 bits reversed).
        private static final byte[] FLIP_TABLE = { 0, -128, 64, -64, 32, -96,
                96, -32, 16, -112, 80, -48, 48, -80, 112, -16, 8, -120, 72,
                -56, 40, -88, 104, -24, 24, -104, 88, -40, 56, -72, 120, -8, 4,
                -124, 68, -60, 36, -92, 100, -28, 20, -108, 84, -44, 52, -76,
                116, -12, 12, -116, 76, -52, 44, -84, 108, -20, 28, -100, 92,
                -36, 60, -68, 124, -4, 2, -126, 66, -62, 34, -94, 98, -30, 18,
                -110, 82, -46, 50, -78, 114, -14, 10, -118, 74, -54, 42, -86,
                106, -22, 26, -102, 90, -38, 58, -70, 122, -6, 6, -122, 70,
                -58, 38, -90, 102, -26, 22, -106, 86, -42, 54, -74, 118, -10,
                14, -114, 78, -50, 46, -82, 110, -18, 30, -98, 94, -34, 62,
                -66, 126, -2, 1, -127, 65, -63, 33, -95, 97, -31, 17, -111, 81,
                -47, 49, -79, 113, -15, 9, -119, 73, -55, 41, -87, 105, -23,
                25, -103, 89, -39, 57, -71, 121, -7, 5, -123, 69, -59, 37, -91,
                101, -27, 21, -107, 85, -43, 53, -75, 117, -11, 13, -115, 77,
                -51, 45, -83, 109, -19, 29, -99, 93, -35, 61, -67, 125, -3, 3,
                -125, 67, -61, 35, -93, 99, -29, 19, -109, 83, -45, 51, -77,
                115, -13, 11, -117, 75, -53, 43, -85, 107, -21, 27, -101, 91,
                -37, 59, -69, 123, -5, 7, -121, 71, -57, 39, -89, 103, -25, 23,
                -105, 87, -41, 55, -73, 119, -9, 15, -113, 79, -49, 47, -81,
                111, -17, 31, -97, 95, -33, 63, -65, 127, -1, };
        // end
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.support.replication;
import com.carrotsearch.randomizedtesting.annotations.Repeat;
import org.apache.lucene.index.CorruptIndexException;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionWriteResponse;
import org.elasticsearch.action.UnavailableShardsException;
import org.elasticsearch.action.WriteConsistencyLevel;
import org.elasticsearch.action.support.ActionFilter;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.shard.IndexShardNotStartedException;
import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardNotFoundException;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.ESAllocationTestCase;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.cluster.TestClusterService;
import org.elasticsearch.test.transport.CapturingTransport;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportResponseOptions;
import org.elasticsearch.transport.TransportService;
import org.hamcrest.Matcher;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.state;
import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithStartedPrimary;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.either;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
public class TransportReplicationActionTests extends ESTestCase {
private static ThreadPool threadPool;      // shared across all tests; created/destroyed in beforeClass/afterClass
private TestClusterService clusterService; // test-local cluster state, mutated per test via setState
private TransportService transportService;
private CapturingTransport transport;      // records outbound requests instead of sending them
private Action action;                     // the TransportReplicationAction under test
/**
 * TransportReplicationAction needs an instance of IndexShard to count operations.
 * indexShards is reset to null before each test and will be initialized upon request in the tests.
 */
@BeforeClass
public static void beforeClass() {
    // One shared thread pool for the whole test class; torn down in afterClass().
    threadPool = new ThreadPool("ShardReplicationTests");
}
@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    // Fresh transport/cluster fixtures per test. Order matters: the
    // TransportService must be started and accepting requests before the
    // action under test is constructed against it.
    transport = new CapturingTransport();
    clusterService = new TestClusterService(threadPool);
    transportService = new TransportService(transport, threadPool);
    transportService.start();
    transportService.acceptIncomingRequests();
    action = new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool);
    // Reset the shared operation counter (checked by assertIndexShardUninitialized).
    count.set(1);
}
@AfterClass
public static void afterClass() {
    // Give in-flight tasks up to 30s to finish before terminating the pool.
    ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS);
    threadPool = null; // allow GC between test-class runs
}
/**
 * Asserts that the given future completed exceptionally with a cause of the
 * given type; normal completion fails the test with {@code msg}.
 */
<T> void assertListenerThrows(String msg, PlainActionFuture<T> listener, Class<?> klass) throws InterruptedException {
    try {
        listener.get();
    } catch (ExecutionException ex) {
        // Expected path: check the wrapped cause and return.
        assertThat(ex.getCause(), instanceOf(klass));
        return;
    }
    // Reached only if the future completed normally.
    fail(msg);
}
@Test
public void testBlocks() throws ExecutionException, InterruptedException {
    // Phase 1: a non-retryable global block must fail the request immediately.
    Request request = new Request();
    PlainActionFuture<Response> listener = new PlainActionFuture<>();
    ReplicationTask task = maybeTask();
    ClusterBlocks.Builder block = ClusterBlocks.builder()
            .addGlobalBlock(new ClusterBlock(1, "non retryable", false, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
    clusterService.setState(ClusterState.builder(clusterService.state()).blocks(block));
    TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(task, request, listener);
    reroutePhase.run();
    assertListenerThrows("primary phase should fail operation", listener, ClusterBlockException.class);
    assertPhase(task, "failed");
    // Phase 2: a retryable block with a short (5ms) request timeout must
    // eventually time out with the same block exception.
    block = ClusterBlocks.builder()
            .addGlobalBlock(new ClusterBlock(1, "retryable", true, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
    clusterService.setState(ClusterState.builder(clusterService.state()).blocks(block));
    listener = new PlainActionFuture<>();
    reroutePhase = action.new ReroutePhase(task, new Request().timeout("5ms"), listener);
    reroutePhase.run();
    assertListenerThrows("failed to timeout on retryable block", listener, ClusterBlockException.class);
    assertPhase(task, "failed");
    // Phase 3: a retryable block with no timeout must leave the request
    // waiting (listener not done) rather than failing.
    listener = new PlainActionFuture<>();
    reroutePhase = action.new ReroutePhase(task, new Request(), listener);
    reroutePhase.run();
    assertFalse("primary phase should wait on retryable block", listener.isDone());
    assertPhase(task, "waiting_for_retry");
    // Phase 4: swapping the retryable block for a non-retryable one must
    // fail the still-waiting request from phase 3.
    block = ClusterBlocks.builder()
            .addGlobalBlock(new ClusterBlock(1, "non retryable", false, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
    clusterService.setState(ClusterState.builder(clusterService.state()).blocks(block));
    assertListenerThrows("primary phase should fail operation when moving from a retryable block to a non-retryable one", listener, ClusterBlockException.class);
    // No shard operation should ever have been started.
    assertIndexShardUninitialized();
}
public void assertIndexShardUninitialized() {
    // setUp() seeds the counter at 1; seeing 1 here means no shard operation
    // was ever acquired or released during the test.
    assertEquals(1, count.get());
}
@Test
public void testNotStartedPrimary() throws InterruptedException, ExecutionException {
    final String index = "test";
    final ShardId shardId = new ShardId(index, 0);
    // no replicas in order to skip the replication part
    clusterService.setState(state(index, true,
            randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED));
    ReplicationTask task = maybeTask();
    logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
    // With a 1ms timeout, an unassigned/initializing primary must fail fast.
    Request request = new Request(shardId).timeout("1ms");
    PlainActionFuture<Response> listener = new PlainActionFuture<>();
    TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(task, request, listener);
    reroutePhase.run();
    assertListenerThrows("unassigned primary didn't cause a timeout", listener, UnavailableShardsException.class);
    assertPhase(task, "failed");
    // Without a timeout, the same situation must leave the request retrying.
    request = new Request(shardId);
    listener = new PlainActionFuture<>();
    reroutePhase = action.new ReroutePhase(task, request, listener);
    reroutePhase.run();
    assertFalse("unassigned primary didn't cause a retry", listener.isDone());
    assertPhase(task, "waiting_for_retry");
    // Starting the primary must release the retry and forward the request to
    // the primary's node.
    clusterService.setState(state(index, true, ShardRoutingState.STARTED));
    logger.debug("--> primary assigned state:\n{}", clusterService.state().prettyPrint());
    final IndexShardRoutingTable shardRoutingTable = clusterService.state().routingTable().index(index).shard(shardId.id());
    final String primaryNodeId = shardRoutingTable.primaryShard().currentNodeId();
    final List<CapturingTransport.CapturedRequest> capturedRequests = transport.capturedRequestsByTargetNode().get(primaryNodeId);
    assertThat(capturedRequests, notNullValue());
    assertThat(capturedRequests.size(), equalTo(1));
    // "[p]" suffix marks the primary-phase transport action.
    assertThat(capturedRequests.get(0).action, equalTo("testAction[p]"));
    assertIndexShardCounter(1);
}
/**
 * When relocating a primary shard, there is a cluster state update at the end of relocation where the active primary is switched from
 * the relocation source to the relocation target. If relocation source receives and processes this cluster state
 * before the relocation target, there is a time span where relocation source believes active primary to be on
 * relocation target and relocation target believes active primary to be on relocation source. This results in replication
 * requests being sent back and forth.
 *
 * This test checks that replication request is not routed back from relocation target to relocation source in case of
 * stale index routing table on relocation target.
 */
@Test
public void testNoRerouteOnStaleClusterState() throws InterruptedException, ExecutionException {
    final String index = "test";
    final ShardId shardId = new ShardId(index, 0);
    // Make the local node the relocation TARGET while the routing table still
    // shows the primary as RELOCATING (i.e. a stale view).
    ClusterState state = state(index, true, ShardRoutingState.RELOCATING);
    IndexShardRoutingTable shardRoutingTable = state.getRoutingTable().shardRoutingTable(shardId.getIndex(), shardId.id());
    String relocationTargetNode = shardRoutingTable.primaryShard().relocatingNodeId();
    state = ClusterState.builder(state).nodes(DiscoveryNodes.builder(state.nodes()).localNodeId(relocationTargetNode)).build();
    clusterService.setState(state);
    logger.debug("--> relocation ongoing state:\n{}", clusterService.state().prettyPrint());
    // Request demands a newer cluster-state version than we have: with a 1ms
    // timeout it must fail rather than be routed back to the source.
    Request request = new Request(shardId).timeout("1ms").routedBasedOnClusterVersion(clusterService.state().version() + 1);
    PlainActionFuture<Response> listener = new PlainActionFuture<>();
    TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(null, request, listener);
    reroutePhase.run();
    assertListenerThrows("cluster state too old didn't cause a timeout", listener, UnavailableShardsException.class);
    // Without a timeout, the stale-state request must wait (retry) instead.
    request = new Request(shardId).routedBasedOnClusterVersion(clusterService.state().version() + 1);
    listener = new PlainActionFuture<>();
    reroutePhase = action.new ReroutePhase(null, request, listener);
    reroutePhase.run();
    assertFalse("cluster state too old didn't cause a retry", listener.isDone());
    // finish relocation
    shardRoutingTable = clusterService.state().getRoutingTable().shardRoutingTable(shardId.getIndex(), shardId.id());
    ShardRouting relocationTarget = shardRoutingTable.shardsWithState(ShardRoutingState.INITIALIZING).get(0);
    AllocationService allocationService = ESAllocationTestCase.createAllocationService();
    RoutingAllocation.Result result = allocationService.applyStartedShards(state, Arrays.asList(relocationTarget));
    ClusterState updatedState = ClusterState.builder(clusterService.state()).routingResult(result).build();
    clusterService.setState(updatedState);
    logger.debug("--> relocation complete state:\n{}", clusterService.state().prettyPrint());
    // Once relocation completes, the waiting request must be forwarded to the
    // (new) primary node exactly once.
    shardRoutingTable = clusterService.state().routingTable().index(index).shard(shardId.id());
    final String primaryNodeId = shardRoutingTable.primaryShard().currentNodeId();
    final List<CapturingTransport.CapturedRequest> capturedRequests =
            transport.capturedRequestsByTargetNode().get(primaryNodeId);
    assertThat(capturedRequests, notNullValue());
    assertThat(capturedRequests.size(), equalTo(1));
    assertThat(capturedRequests.get(0).action, equalTo("testAction[p]"));
    assertIndexShardCounter(1);
}
/**
 * The reroute phase must fail fast with the right exception: an unknown index
 * yields {@code IndexNotFoundException}, and an out-of-range shard id on a
 * known index yields {@code ShardNotFoundException}.
 */
@Test
public void testUnknownIndexOrShardOnReroute() throws InterruptedException {
final String index = "test";
// no replicas in order to skip the replication part
clusterService.setState(state(index, true,
randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED));
logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
// "unknown_index" is not part of the cluster state set above
Request request = new Request(new ShardId("unknown_index", 0)).timeout("1ms");
PlainActionFuture<Response> listener = new PlainActionFuture<>();
ReplicationTask task = maybeTask();
TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(task, request, listener);
reroutePhase.run();
assertListenerThrows("must throw index not found exception", listener, IndexNotFoundException.class);
assertPhase(task, "failed");
// shard id 10 does not exist in index "test" (only shard 0 was created)
request = new Request(new ShardId(index, 10)).timeout("1ms");
listener = new PlainActionFuture<>();
reroutePhase = action.new ReroutePhase(null, request, listener);
reroutePhase.run();
assertListenerThrows("must throw shard not found exception", listener, ShardNotFoundException.class);
}
/**
 * The reroute phase must forward the request to the node holding the primary:
 * as a local primary-action ("testAction[p]") when the primary is on the local
 * node, or as a rerouted top-level action ("testAction") otherwise.
 */
@Test
public void testRoutePhaseExecutesRequest() {
final String index = "test";
final ShardId shardId = new ShardId(index, 0);
ReplicationTask task = maybeTask();
clusterService.setState(stateWithStartedPrimary(index, randomBoolean(), 3));
logger.debug("using state: \n{}", clusterService.state().prettyPrint());
final IndexShardRoutingTable shardRoutingTable = clusterService.state().routingTable().index(index).shard(shardId.id());
final String primaryNodeId = shardRoutingTable.primaryShard().currentNodeId();
Request request = new Request(shardId);
PlainActionFuture<Response> listener = new PlainActionFuture<>();
TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(task, request, listener);
reroutePhase.run();
assertThat(request.shardId(), equalTo(shardId));
logger.info("--> primary is assigned to [{}], checking request forwarded", primaryNodeId);
// exactly one request must have been captured for the primary's node
final List<CapturingTransport.CapturedRequest> capturedRequests = transport.capturedRequestsByTargetNode().get(primaryNodeId);
assertThat(capturedRequests, notNullValue());
assertThat(capturedRequests.size(), equalTo(1));
if (clusterService.state().nodes().localNodeId().equals(primaryNodeId)) {
// local primary: the request is handled via the primary sub-action
assertThat(capturedRequests.get(0).action, equalTo("testAction[p]"));
assertPhase(task, "waiting_on_primary");
} else {
// remote primary: the request is rerouted as the top-level action
assertThat(capturedRequests.get(0).action, equalTo("testAction"));
assertPhase(task, "rerouted");
}
assertIndexShardUninitialized();
}
/**
 * The primary phase must execute the shard operation on the primary and then
 * send exactly one replica request ("testAction[r]") to the replica node.
 */
@Test
public void testPrimaryPhaseExecutesRequest() throws InterruptedException, ExecutionException {
final String index = "test";
final ShardId shardId = new ShardId(index, 0);
ReplicationTask task = maybeTask();
// one started primary plus one started replica
clusterService.setState(state(index, true, ShardRoutingState.STARTED, ShardRoutingState.STARTED));
Request request = new Request(shardId).timeout("1ms");
PlainActionFuture<Response> listener = new PlainActionFuture<>();
TransportReplicationAction.PrimaryPhase primaryPhase = action.new PrimaryPhase(task, request, createTransportChannel(listener));
primaryPhase.run();
assertThat("request was not processed on primary", request.processedOnPrimary.get(), equalTo(true));
final String replicaNodeId = clusterService.state().getRoutingTable().shardRoutingTable(index, shardId.id()).replicaShards().get(0).currentNodeId();
final List<CapturingTransport.CapturedRequest> requests = transport.capturedRequestsByTargetNode().get(replicaNodeId);
assertThat(requests, notNullValue());
assertThat(requests.size(), equalTo(1));
assertThat("replica request was not sent", requests.get(0).action, equalTo("testAction[r]"));
}
/**
 * Replicas that appear in the cluster state AFTER the primary operation has
 * run must still receive replica requests: the replication phase re-reads the
 * cluster state after the primary operation.
 */
@Test
public void testAddedReplicaAfterPrimaryOperation() {
final String index = "test";
final ShardId shardId = new ShardId(index, 0);
// start with no replicas
clusterService.setState(stateWithStartedPrimary(index, true, 0));
logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
final ClusterState stateWithAddedReplicas = state(index, true, ShardRoutingState.STARTED, randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.STARTED);
ReplicationTask task = maybeTask();
// action whose primary operation swaps in a cluster state that contains replicas
final Action actionWithAddedReplicaAfterPrimaryOp = new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) {
@Override
protected Tuple<Response, Request> shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Throwable {
final Tuple<Response, Request> operationOnPrimary = super.shardOperationOnPrimary(metaData, shardRequest);
// add replicas after primary operation
((TestClusterService) clusterService).setState(stateWithAddedReplicas);
logger.debug("--> state after primary operation:\n{}", clusterService.state().prettyPrint());
return operationOnPrimary;
}
};
Request request = new Request(shardId);
PlainActionFuture<Response> listener = new PlainActionFuture<>();
TransportReplicationAction<Request, Request, Response>.PrimaryPhase primaryPhase = actionWithAddedReplicaAfterPrimaryOp.new PrimaryPhase(task, request, createTransportChannel(listener));
primaryPhase.run();
assertThat("request was not processed on primary", request.processedOnPrimary.get(), equalTo(true));
assertPhase(task, "replicating");
// every newly added replica must have received exactly one replica request
for (ShardRouting replica : stateWithAddedReplicas.getRoutingTable().shardRoutingTable(index, shardId.id()).replicaShards()) {
List<CapturingTransport.CapturedRequest> requests = transport.capturedRequestsByTargetNode().get(replica.currentNodeId());
assertThat(requests, notNullValue());
assertThat(requests.size(), equalTo(1));
assertThat("replica request was not sent", requests.get(0).action, equalTo("testAction[r]"));
}
}
/**
 * If a replica starts relocating after the primary operation has run, replica
 * requests must be sent to BOTH the relocation source and the relocation
 * target node.
 */
@Test
public void testRelocatingReplicaAfterPrimaryOperation() {
final String index = "test";
final ShardId shardId = new ShardId(index, 0);
// start with a replica
clusterService.setState(state(index, true, ShardRoutingState.STARTED, randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.STARTED));
logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
final ClusterState stateWithRelocatingReplica = state(index, true, ShardRoutingState.STARTED, ShardRoutingState.RELOCATING);
// action whose primary operation swaps in a state with a relocating replica
final Action actionWithRelocatingReplicasAfterPrimaryOp = new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) {
@Override
protected Tuple<Response, Request> shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Throwable {
final Tuple<Response, Request> operationOnPrimary = super.shardOperationOnPrimary(metaData, shardRequest);
// set replica to relocating
((TestClusterService) clusterService).setState(stateWithRelocatingReplica);
logger.debug("--> state after primary operation:\n{}", clusterService.state().prettyPrint());
return operationOnPrimary;
}
};
Request request = new Request(shardId);
PlainActionFuture<Response> listener = new PlainActionFuture<>();
ReplicationTask task = maybeTask();
TransportReplicationAction<Request, Request, Response>.PrimaryPhase primaryPhase = actionWithRelocatingReplicasAfterPrimaryOp.new PrimaryPhase(
task, request, createTransportChannel(listener));
primaryPhase.run();
assertThat("request was not processed on primary", request.processedOnPrimary.get(), equalTo(true));
ShardRouting relocatingReplicaShard = stateWithRelocatingReplica.getRoutingTable().shardRoutingTable(index, shardId.id()).replicaShards().get(0);
assertPhase(task, "replicating");
// both the current node and the relocation target must get one replica request each
for (String node : new String[] {relocatingReplicaShard.currentNodeId(), relocatingReplicaShard.relocatingNodeId()}) {
List<CapturingTransport.CapturedRequest> requests = transport.capturedRequestsByTargetNode().get(node);
assertThat(requests, notNullValue());
assertThat(requests.size(), equalTo(1));
assertThat("replica request was not sent to replica", requests.get(0).action, equalTo("testAction[r]"));
}
}
/**
 * If the index is deleted between the primary operation and the replication
 * phase, no replica requests may be sent: the replication phase is skipped
 * entirely and the task finishes.
 */
@Test
public void testIndexDeletedAfterPrimaryOperation() {
final String index = "test";
final ShardId shardId = new ShardId(index, 0);
clusterService.setState(state(index, true, ShardRoutingState.STARTED, ShardRoutingState.STARTED));
logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
// state that contains only a differently-named index, i.e. "test" is gone
final ClusterState stateWithDeletedIndex = state(index + "_new", true, ShardRoutingState.STARTED, ShardRoutingState.RELOCATING);
final Action actionWithDeletedIndexAfterPrimaryOp = new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) {
@Override
protected Tuple<Response, Request> shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Throwable {
final Tuple<Response, Request> operationOnPrimary = super.shardOperationOnPrimary(metaData, shardRequest);
// delete index after primary op
((TestClusterService) clusterService).setState(stateWithDeletedIndex);
logger.debug("--> state after primary operation:\n{}", clusterService.state().prettyPrint());
return operationOnPrimary;
}
};
Request request = new Request(shardId);
PlainActionFuture<Response> listener = new PlainActionFuture<>();
ReplicationTask task = maybeTask();
TransportReplicationAction<Request, Request, Response>.PrimaryPhase primaryPhase = actionWithDeletedIndexAfterPrimaryOp.new PrimaryPhase(
task, request, createTransportChannel(listener));
primaryPhase.run();
assertThat("request was not processed on primary", request.processedOnPrimary.get(), equalTo(true));
assertThat("replication phase should be skipped if index gets deleted after primary operation", transport.capturedRequestsByTargetNode().size(), equalTo(0));
assertPhase(task, "finished");
}
/**
 * Exercises the write-consistency check for every {@code WriteConsistencyLevel}
 * against a randomized mix of assigned and unassigned replicas. When the level
 * is not met the request must fail with {@code UnavailableShardsException},
 * and must succeed once all replicas are started.
 */
@Test
public void testWriteConsistency() throws ExecutionException, InterruptedException {
// ActionWithConsistency enables the consistency check (checkWriteConsistency() == true)
action = new ActionWithConsistency(Settings.EMPTY, "testActionWithConsistency", transportService, clusterService, threadPool);
final String index = "test";
final ShardId shardId = new ShardId(index, 0);
final int assignedReplicas = randomInt(2);
final int unassignedReplicas = randomInt(2);
final int totalShards = 1 + assignedReplicas + unassignedReplicas;
final boolean passesWriteConsistency;
Request request = new Request(shardId).consistencyLevel(randomFrom(WriteConsistencyLevel.values()));
// compute the expected outcome for the randomly chosen consistency level
switch (request.consistencyLevel()) {
case ONE:
passesWriteConsistency = true;
break;
case DEFAULT:
case QUORUM:
if (totalShards <= 2) {
passesWriteConsistency = true; // primary is enough
} else {
// quorum = floor(total/2) + 1 active copies (primary counts as one)
passesWriteConsistency = assignedReplicas + 1 >= (totalShards / 2) + 1;
}
break;
case ALL:
passesWriteConsistency = unassignedReplicas == 0;
break;
default:
throw new RuntimeException("unknown consistency level [" + request.consistencyLevel() + "]");
}
// first `assignedReplicas` entries are active, the rest unassigned
ShardRoutingState[] replicaStates = new ShardRoutingState[assignedReplicas + unassignedReplicas];
for (int i = 0; i < assignedReplicas; i++) {
replicaStates[i] = randomFrom(ShardRoutingState.STARTED, ShardRoutingState.RELOCATING);
}
for (int i = assignedReplicas; i < replicaStates.length; i++) {
replicaStates[i] = ShardRoutingState.UNASSIGNED;
}
clusterService.setState(state(index, true, ShardRoutingState.STARTED, replicaStates));
logger.debug("using consistency level of [{}], assigned shards [{}], total shards [{}]. expecting op to [{}]. using state: \n{}",
request.consistencyLevel(), 1 + assignedReplicas, 1 + assignedReplicas + unassignedReplicas, passesWriteConsistency ? "succeed" : "retry",
clusterService.state().prettyPrint());
final IndexShardRoutingTable shardRoutingTable = clusterService.state().routingTable().index(index).shard(shardId.id());
PlainActionFuture<Response> listener = new PlainActionFuture<>();
ReplicationTask task = maybeTask();
TransportReplicationAction.PrimaryPhase primaryPhase = action.new PrimaryPhase(task, request, createTransportChannel(listener));
if (passesWriteConsistency) {
// checkWriteConsistency returns null (no failure reason) when the level is met
assertThat(primaryPhase.checkWriteConsistency(shardRoutingTable.primaryShard().shardId()), nullValue());
primaryPhase.run();
assertTrue("operations should have been performed, consistency level is met", request.processedOnPrimary.get());
if (assignedReplicas > 0) {
assertIndexShardCounter(2);
} else {
assertIndexShardCounter(1);
}
assertPhase(task, either(equalTo("finished")).or(equalTo("replicating")));
} else {
assertThat(primaryPhase.checkWriteConsistency(shardRoutingTable.primaryShard().shardId()), notNullValue());
primaryPhase.run();
assertFalse("operations should not have been perform, consistency level is *NOT* met", request.processedOnPrimary.get());
assertListenerThrows("should throw exception to trigger retry", listener, UnavailableShardsException.class);
assertIndexShardUninitialized();
// start all replicas so the consistency level is now satisfied, then retry
for (int i = 0; i < replicaStates.length; i++) {
replicaStates[i] = ShardRoutingState.STARTED;
}
clusterService.setState(state(index, true, ShardRoutingState.STARTED, replicaStates));
listener = new PlainActionFuture<>();
primaryPhase = action.new PrimaryPhase(task, request, createTransportChannel(listener));
primaryPhase.run();
assertTrue("once the consistency level met, operation should continue", request.processedOnPrimary.get());
assertIndexShardCounter(2);
assertPhase(task, "replicating");
}
}
/**
 * Runs the generic replication test against a routing table with a random
 * number of replicas, counting expected shard copies from the routing table.
 */
@Test
public void testReplication() throws ExecutionException, InterruptedException {
final String index = "test";
final ShardId shardId = new ShardId(index, 0);
clusterService.setState(stateWithStartedPrimary(index, true, randomInt(5)));
final IndexShardRoutingTable shardRoutingTable = clusterService.state().routingTable().index(index).shard(shardId.id());
int assignedReplicas = 0;
int totalShards = 0;
for (ShardRouting shard : shardRoutingTable) {
totalShards++;
if (shard.primary() == false && shard.assignedToNode()) {
assignedReplicas++;
}
// a relocating shard's target node counts as an extra copy to replicate to
if (shard.relocating()) {
assignedReplicas++;
totalShards++;
}
}
runReplicateTest(shardRoutingTable, assignedReplicas, totalShards);
}
/**
 * Same as testReplication but with shadow replicas enabled: replica shards do
 * NOT execute the operation, so only the primary (and its relocation target,
 * if any) counts toward the expected replication targets.
 */
@Test
public void testReplicationWithShadowIndex() throws ExecutionException, InterruptedException {
final String index = "test";
final ShardId shardId = new ShardId(index, 0);
ClusterState state = stateWithStartedPrimary(index, true, randomInt(5));
// flip the index to shadow-replica mode in its metadata settings
MetaData.Builder metaData = MetaData.builder(state.metaData());
Settings.Builder settings = Settings.builder().put(metaData.get(index).getSettings());
settings.put(IndexMetaData.SETTING_SHADOW_REPLICAS, true);
metaData.put(IndexMetaData.builder(metaData.get(index)).settings(settings));
clusterService.setState(ClusterState.builder(state).metaData(metaData));
final IndexShardRoutingTable shardRoutingTable = clusterService.state().routingTable().index(index).shard(shardId.id());
int assignedReplicas = 0;
int totalShards = 0;
for (ShardRouting shard : shardRoutingTable) {
totalShards++;
// only a relocating primary contributes an extra replication target
if (shard.primary() && shard.relocating()) {
assignedReplicas++;
totalShards++;
}
}
runReplicateTest(shardRoutingTable, assignedReplicas, totalShards);
}
/**
 * Drives a {@code ReplicationPhase} end-to-end: verifies the expected
 * total/pending shard accounting, checks that exactly one replica request was
 * sent to each expected node (and none to the local node), then answers each
 * captured request with a random mix of successes, retryable failures and
 * critical failures, asserting the pending/successful counters, the
 * shard-failed requests sent to the master, and the final ShardInfo.
 *
 * @param shardRoutingTable routing table of the shard under test
 * @param assignedReplicas  number of shard copies expected to receive a replica request
 * @param totalShards       total number of shard copies reported in ShardInfo
 */
protected void runReplicateTest(IndexShardRoutingTable shardRoutingTable, int assignedReplicas, int totalShards) throws InterruptedException, ExecutionException {
final ShardIterator shardIt = shardRoutingTable.shardsIt();
final ShardId shardId = shardIt.shardId();
final Request request = new Request(shardId);
final PlainActionFuture<Response> listener = new PlainActionFuture<>();
ReplicationTask task = maybeTask();
logger.debug("expecting [{}] assigned replicas, [{}] total shards. using state: \n{}", assignedReplicas, totalShards, clusterService.state().prettyPrint());
// the reference stands in for the primary operation's hold on the shard (counter goes to 2)
Releasable reference = getOrCreateIndexShardOperationsCounter();
assertIndexShardCounter(2);
TransportReplicationAction<Request, Request, Response>.ReplicationPhase replicationPhase =
action.new ReplicationPhase(task, request,
new Response(),
request.shardId(), createTransportChannel(listener), reference);
assertThat(replicationPhase.totalShards(), equalTo(totalShards));
assertThat(replicationPhase.pending(), equalTo(assignedReplicas));
replicationPhase.run();
final CapturingTransport.CapturedRequest[] capturedRequests = transport.capturedRequests();
transport.clear();
assertPhase(task, either(equalTo("finished")).or(equalTo("replicating")));
HashMap<String, Request> nodesSentTo = new HashMap<>();
// shadow-replica indices skip execution on replicas
boolean executeOnReplica =
action.shouldExecuteReplication(clusterService.state().getMetaData().index(shardId.getIndex()).getSettings());
for (CapturingTransport.CapturedRequest capturedRequest : capturedRequests) {
// no duplicate requests
Request replicationRequest = (Request) capturedRequest.request;
assertNull(nodesSentTo.put(capturedRequest.node.getId(), replicationRequest));
// the request is hitting the correct shard
assertEquals(request.shardId, replicationRequest.shardId);
}
// no request was sent to the local node
assertThat(nodesSentTo.keySet(), not(hasItem(clusterService.state().getNodes().localNodeId())));
// requests were sent to the correct shard copies: remove every expected
// target; anything left over was an unexpected request
for (ShardRouting shard : clusterService.state().getRoutingTable().shardRoutingTable(shardId.getIndex(), shardId.id())) {
if (shard.primary() == false && executeOnReplica == false) {
continue;
}
if (shard.unassigned()) {
continue;
}
if (shard.primary() == false) {
nodesSentTo.remove(shard.currentNodeId());
}
if (shard.relocating()) {
nodesSentTo.remove(shard.relocatingNodeId());
}
}
assertThat(nodesSentTo.entrySet(), is(empty()));
if (assignedReplicas > 0) {
assertThat("listener is done, but there are outstanding replicas", listener.isDone(), equalTo(false));
}
int pending = replicationPhase.pending();
int criticalFailures = 0; // failures that should fail the shard
int successful = 1;
List<CapturingTransport.CapturedRequest> failures = new ArrayList<>();
// answer every replica request, randomly choosing success, retryable
// failure (ignored) or critical failure (triggers a shard-failed request)
for (CapturingTransport.CapturedRequest capturedRequest : capturedRequests) {
if (randomBoolean()) {
Throwable t;
boolean criticalFailure = randomBoolean();
if (criticalFailure) {
t = new CorruptIndexException("simulated", (String) null);
criticalFailures++;
} else {
t = new IndexShardNotStartedException(shardId, IndexShardState.RECOVERING);
}
logger.debug("--> simulating failure on {} with [{}]", capturedRequest.node, t.getClass().getSimpleName());
transport.handleResponse(capturedRequest.requestId, t);
if (criticalFailure) {
// a critical failure must produce exactly one shard-failed request
CapturingTransport.CapturedRequest[] shardFailedRequests = transport.capturedRequests();
transport.clear();
assertEquals(1, shardFailedRequests.length);
CapturingTransport.CapturedRequest shardFailedRequest = shardFailedRequests[0];
// get the shard the request was sent to
ShardRouting routing = clusterService.state().getRoutingNodes().node(capturedRequest.node.id()).get(request.shardId.id());
// and the shard that was requested to be failed
ShardStateAction.ShardRoutingEntry shardRoutingEntry = (ShardStateAction.ShardRoutingEntry)shardFailedRequest.request;
// the shard the request was sent to and the shard to be failed should be the same
assertEquals(shardRoutingEntry.getShardRouting(), routing);
failures.add(shardFailedRequest);
transport.handleResponse(shardFailedRequest.requestId, TransportResponse.Empty.INSTANCE);
}
} else {
successful++;
transport.handleResponse(capturedRequest.requestId, TransportResponse.Empty.INSTANCE);
}
pending--;
assertThat(replicationPhase.pending(), equalTo(pending));
assertThat(replicationPhase.successful(), equalTo(successful));
}
assertThat(listener.isDone(), equalTo(true));
Response response = listener.get();
// ShardInfo must reflect only the critical failures; retryable ones are not reported
final ActionWriteResponse.ShardInfo shardInfo = response.getShardInfo();
assertThat(shardInfo.getFailed(), equalTo(criticalFailures));
assertThat(shardInfo.getFailures(), arrayWithSize(criticalFailures));
assertThat(shardInfo.getSuccessful(), equalTo(successful));
assertThat(shardInfo.getTotal(), equalTo(totalShards));
assertThat("failed to see enough shard failures", failures.size(), equalTo(criticalFailures));
for (CapturingTransport.CapturedRequest capturedRequest : transport.capturedRequests()) {
assertThat(capturedRequest.action, equalTo(ShardStateAction.SHARD_FAILED_ACTION_NAME));
}
// all replicas have responded so the counter should be decreased again
assertIndexShardCounter(1);
}
/**
 * The index shard operations counter must be held (value 2) while the primary
 * operation is in flight and released (back to the baseline of 1) once it
 * completes. No replicas exist, so no replica requests may be sent.
 */
@Test
public void testCounterOnPrimary() throws Exception {
final String index = "test";
final ShardId shardId = new ShardId(index, 0);
// no replica, we only want to test on primary
clusterService.setState(state(index, true, ShardRoutingState.STARTED));
logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
Request request = new Request(shardId).timeout("100ms");
PlainActionFuture<Response> listener = new PlainActionFuture<>();
ReplicationTask task = maybeTask();
/**
 * Execute an action that is stuck in shard operation until a latch is counted down.
 * That way we can start the operation, check if the counter was incremented and then unblock the operation
 * again to see if the counter is decremented afterwards.
 * TODO: I could also write an action that asserts that the counter is 2 in the shard operation.
 * However, this failure would only become apparent once listener.get is called. Seems a little implicit.
 * */
action = new ActionWithDelay(Settings.EMPTY, "testActionWithExceptions", transportService, clusterService, threadPool);
final TransportReplicationAction.PrimaryPhase primaryPhase = action.new PrimaryPhase(task, request, createTransportChannel(listener));
// run the blocking primary phase on a separate thread so this thread can observe the counter
Thread t = new Thread() {
@Override
public void run() {
primaryPhase.run();
}
};
t.start();
// shard operation should be ongoing, so the counter is at 2
// we have to wait here because increment happens in thread
assertBusy(new Runnable() {
@Override
public void run() {
assertIndexShardCounter(2);
}
});
assertThat(transport.capturedRequests().length, equalTo(0));
((ActionWithDelay) action).countDownLatch.countDown();
t.join();
listener.get();
// operation finished, counter back to its baseline of 1 (the extra hold was released)
assertIndexShardCounter(1);
assertThat(transport.capturedRequests().length, equalTo(0));
assertPhase(task, "finished");
}
/**
 * While replication to a replica is outstanding the counter must stay at 2,
 * and it must drop back to 1 once the replica responds — both for a successful
 * replica response and for a failure response.
 */
@Test
public void testCounterIncrementedWhileReplicationOngoing() throws InterruptedException, ExecutionException, IOException {
final String index = "test";
final ShardId shardId = new ShardId(index, 0);
// one replica to make sure replication is attempted
clusterService.setState(state(index, true,
ShardRoutingState.STARTED, ShardRoutingState.STARTED));
logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
Request request = new Request(shardId).timeout("100ms");
PlainActionFuture<Response> listener = new PlainActionFuture<>();
ReplicationTask task = maybeTask();
TransportReplicationAction.PrimaryPhase primaryPhase = action.new PrimaryPhase(task, request, createTransportChannel(listener));
primaryPhase.run();
// replica request outstanding -> counter held at 2
assertIndexShardCounter(2);
assertThat(transport.capturedRequests().length, equalTo(1));
// try once with successful response
transport.handleResponse(transport.capturedRequests()[0].requestId, TransportResponse.Empty.INSTANCE);
assertIndexShardCounter(1);
transport.clear();
assertPhase(task, "finished");
// second round: same flow, but the replica answers with a failure
request = new Request(shardId).timeout("100ms");
task = maybeTask();
primaryPhase = action.new PrimaryPhase(task, request, createTransportChannel(listener));
primaryPhase.run();
assertIndexShardCounter(2);
assertThat(transport.capturedRequests().length, equalTo(1));
assertPhase(task, "replicating");
// try with failure response
transport.handleResponse(transport.capturedRequests()[0].requestId, new CorruptIndexException("simulated", (String) null));
assertIndexShardCounter(1);
}
/**
 * The index shard operations counter must be incremented while a replica
 * operation is in flight and decremented once it completes — both for a
 * delayed-but-successful operation and for one that throws.
 */
@Test
public void testReplicasCounter() throws Exception {
final ShardId shardId = new ShardId("test", 0);
clusterService.setState(state(shardId.index().getName(), true,
ShardRoutingState.STARTED, ShardRoutingState.STARTED));
// ActionWithDelay blocks the replica operation until its latch is counted down
action = new ActionWithDelay(Settings.EMPTY, "testActionWithExceptions", transportService, clusterService, threadPool);
final Action.ReplicaOperationTransportHandler replicaOperationTransportHandler = action.new ReplicaOperationTransportHandler();
final ReplicationTask task = maybeTask();
// run the blocking replica operation on a separate thread so this thread can observe the counter
Thread t = new Thread() {
@Override
public void run() {
try {
replicaOperationTransportHandler.messageReceived(new Request().setShardId(shardId), createTransportChannel(new PlainActionFuture<Response>()), task);
} catch (Exception e) {
logger.error("Failed", e);
}
}
};
t.start();
// shard operation should be ongoing, so the counter is at 2
// we have to wait here because increment happens in thread
assertBusy(new Runnable() {
@Override
public void run() {
assertIndexShardCounter(2);
}
});
((ActionWithDelay) action).countDownLatch.countDown();
t.join();
assertPhase(task, "finished");
// operation should have finished and counter decreased because no outstanding replica requests
assertIndexShardCounter(1);
// now check if this also works if operation throws exception
action = new ActionWithExceptions(Settings.EMPTY, "testActionWithExceptions", transportService, clusterService, threadPool);
final Action.ReplicaOperationTransportHandler replicaOperationTransportHandlerForException = action.new ReplicaOperationTransportHandler();
// NOTE: fail() must not be called inside the try block — catch (Throwable)
// would swallow its AssertionError and silently mask a missing exception.
boolean operationThrew = false;
try {
replicaOperationTransportHandlerForException.messageReceived(new Request(shardId), createTransportChannel(new PlainActionFuture<Response>()), task);
} catch (Throwable t2) {
operationThrew = true;
}
assertTrue("replica operation was expected to throw an exception", operationThrew);
assertIndexShardCounter(1);
}
/**
 * When the primary shard operation throws, the operations counter must be
 * released again (back to 1), no replica request may be sent, and the task
 * must end in the "failed" phase.
 */
@Test
public void testCounterDecrementedIfShardOperationThrowsException() throws InterruptedException, ExecutionException, IOException {
// ActionWithExceptions always throws from the shard operation
action = new ActionWithExceptions(Settings.EMPTY, "testActionWithExceptions", transportService, clusterService, threadPool);
final String index = "test";
final ShardId shardId = new ShardId(index, 0);
clusterService.setState(state(index, true,
ShardRoutingState.STARTED, ShardRoutingState.STARTED));
logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
Request request = new Request(shardId).timeout("100ms");
PlainActionFuture<Response> listener = new PlainActionFuture<>();
ReplicationTask task = maybeTask();
TransportReplicationAction.PrimaryPhase primaryPhase = action.new PrimaryPhase(task, request, createTransportChannel(listener));
primaryPhase.run();
// no replica request should have been sent yet
assertThat(transport.capturedRequests().length, equalTo(0));
// no matter if the operation is retried or not, counter must be back to 1
assertIndexShardCounter(1);
assertPhase(task, "failed");
}
// Asserts the current value of the simulated index shard operations counter.
private void assertIndexShardCounter(int expected) {
assertThat(count.get(), equalTo(expected));
}
// Number of outstanding index-shard operation references held via
// getOrCreateIndexShardOperationsCounter(); 0 when nothing is in flight.
private final AtomicInteger count = new AtomicInteger(0);
/*
 * Increments the test-wide operations counter and returns a Releasable that
 * decrements it again on close. Stands in for the real index shard operation
 * counter so the tests can assert how many operations are currently held.
 * (It always hands out a fresh Releasable; nothing is cached.)
 * */
private synchronized Releasable getOrCreateIndexShardOperationsCounter() {
count.incrementAndGet();
return new Releasable() {
@Override
public void close() {
count.decrementAndGet();
}
};
}
/**
 * Sometimes build a ReplicationTask for tracking the phase of the
 * TransportReplicationAction. Since TransportReplicationAction has to work
 * if the task is null just as well as if it is supplied, this returns null
 * half the time.
 */
private ReplicationTask maybeTask() {
return random().nextBoolean() ? new ReplicationTask(0, null, null, null, null) : null;
}
/**
 * If the task is non-null this asserts that the phase matches.
 */
private void assertPhase(@Nullable ReplicationTask task, String phase) {
assertPhase(task, equalTo(phase));
}
// If the task is non-null, asserts its current phase satisfies the matcher;
// a null task (see maybeTask()) means phase tracking is not being exercised.
private void assertPhase(@Nullable ReplicationTask task, Matcher<String> phaseMatcher) {
if (task != null) {
assertThat(task.getPhase(), phaseMatcher);
}
}
/**
 * Replication request used by these tests. Records whether the primary
 * operation ran and how many replica operations ran for it.
 */
public static class Request extends ReplicationRequest<Request> {
// set to true by Action.shardOperationOnPrimary; must only happen once
public AtomicBoolean processedOnPrimary = new AtomicBoolean();
// incremented by Action.shardOperationOnReplica for every replica execution
public AtomicInteger processedOnReplicas = new AtomicInteger();
public Request() {
}
Request(ShardId shardId) {
this();
this.shardId = shardId;
this.index = shardId.getIndex();
// keep things simple
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
}
}
// Minimal response type; ActionWriteResponse already carries the ShardInfo the tests inspect.
static class Response extends ActionWriteResponse {
}
/**
 * Base test implementation of TransportReplicationAction. Marks requests as
 * processed on primary/replica and routes the shard-operation counter through
 * the enclosing test's shared counter. Write consistency is disabled; the
 * subclass ActionWithConsistency enables it.
 */
class Action extends TransportReplicationAction<Request, Request, Response> {
Action(Settings settings, String actionName, TransportService transportService,
ClusterService clusterService,
ThreadPool threadPool) {
// indicesService and a few collaborators are not needed by these tests and are passed as null
super(settings, actionName, transportService, clusterService, null, threadPool,
new ShardStateAction(settings, clusterService, transportService, null, null), null,
new ActionFilters(new HashSet<ActionFilter>()), new IndexNameExpressionResolver(Settings.EMPTY), Request.class, Request.class, ThreadPool.Names.SAME);
}
@Override
protected Response newResponseInstance() {
return new Response();
}
@Override
protected Tuple<Response, Request> shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Throwable {
// the primary operation must run at most once per request
boolean executedBefore = shardRequest.processedOnPrimary.getAndSet(true);
assert executedBefore == false : "request has already been executed on the primary";
return new Tuple<>(new Response(), shardRequest);
}
@Override
protected void shardOperationOnReplica(Request request) {
request.processedOnReplicas.incrementAndGet();
}
@Override
protected boolean checkWriteConsistency() {
return false;
}
@Override
protected boolean resolveIndex() {
return false;
}
@Override
protected Releasable getIndexShardOperationsCounter(ShardId shardId) {
// delegate to the test-wide counter so tests can observe in-flight operations
return getOrCreateIndexShardOperationsCounter();
}
}
/**
 * Variant of {@link Action} with the write consistency check enabled
 * (used by testWriteConsistency).
 */
class ActionWithConsistency extends Action {
ActionWithConsistency(Settings settings, String actionName, TransportService transportService, ClusterService clusterService, ThreadPool threadPool) {
super(settings, actionName, transportService, clusterService, threadPool);
}
@Override
protected boolean checkWriteConsistency() {
return true;
}
}
/**
 * Throws exceptions when executed. Used for testing if the counter is correctly decremented in case an operation fails.
 */
class ActionWithExceptions extends Action {
ActionWithExceptions(Settings settings, String actionName, TransportService transportService, ClusterService clusterService, ThreadPool threadPool) throws IOException {
super(settings, actionName, transportService, clusterService, threadPool);
}
@Override
protected Tuple<Response, Request> shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Throwable {
return throwException(shardRequest.shardId());
}
// Randomly throws either a non-retryable or a retryable exception; declared
// to return the tuple type only so callers compile — it never returns.
private Tuple<Response, Request> throwException(ShardId shardId) {
try {
if (randomBoolean()) {
// throw a generic exception
// for testing on replica this will actually cause an NPE because it will make the shard fail but
// for this we need an IndicesService which is null.
throw new ElasticsearchException("simulated");
} else {
// throw an exception which will cause retry on primary and be ignored on replica
throw new IndexShardNotStartedException(shardId, IndexShardState.RECOVERING);
}
} catch (Exception e) {
// logged before rethrowing so the simulated failure shows up in the test output
logger.info("throwing ", e);
throw e;
}
}
@Override
protected void shardOperationOnReplica(Request shardRequest) {
throwException(shardRequest.shardId());
}
}
/**
 * Delays the primary/replica shard operation until {@link #countDownLatch} is
 * counted down. Lets tests start an operation, observe the in-flight
 * operation counter, and then unblock the operation.
 */
class ActionWithDelay extends Action {
CountDownLatch countDownLatch = new CountDownLatch(1);
ActionWithDelay(Settings settings, String actionName, TransportService transportService, ClusterService clusterService, ThreadPool threadPool) throws IOException {
super(settings, actionName, transportService, clusterService, threadPool);
}
@Override
protected Tuple<Response, Request> shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Throwable {
awaitLatch();
return new Tuple<>(new Response(), shardRequest);
}
// Blocks until the current latch is released, then arms a fresh latch for the next operation.
private void awaitLatch() throws InterruptedException {
countDownLatch.await();
countDownLatch = new CountDownLatch(1);
}
@Override
protected void shardOperationOnReplica(Request shardRequest) {
try {
awaitLatch();
} catch (InterruptedException e) {
// restore the interrupt status rather than silently swallowing it,
// so the executing thread can still observe that it was interrupted
Thread.currentThread().interrupt();
}
}
}
/**
 * Transport channel that is needed for replica operation testing.
 * Bridges channel responses into the given future: a response resolves the
 * listener, a throwable fails it. Other channel metadata is stubbed out.
 */
public TransportChannel createTransportChannel(final PlainActionFuture<Response> listener) {
return new TransportChannel() {
@Override
public String action() {
return null;
}
@Override
public String getProfileName() {
return "";
}
@Override
public void sendResponse(TransportResponse response) throws IOException {
listener.onResponse(((Response) response));
}
@Override
public void sendResponse(TransportResponse response, TransportResponseOptions options) throws IOException {
// response options are irrelevant for these tests; delegate to the same path
listener.onResponse(((Response) response));
}
@Override
public void sendResponse(Throwable error) throws IOException {
listener.onFailure(error);
}
@Override
public long getRequestId() {
return 0;
}
@Override
public String getChannelType() {
return "replica_test";
}
};
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.