gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.zookeeper;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.component.zookeeper.operations.CreateOperation;
import org.apache.camel.component.zookeeper.operations.DeleteOperation;
import org.apache.camel.component.zookeeper.operations.GetChildrenOperation;
import org.apache.camel.component.zookeeper.operations.OperationResult;
import org.apache.camel.component.zookeeper.operations.SetDataOperation;
import org.apache.camel.support.DefaultProducer;
import org.apache.camel.support.ExchangeHelper;
import org.apache.zookeeper.AsyncCallback.StatCallback;
import org.apache.zookeeper.AsyncCallback.VoidCallback;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException.Code;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.Stat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.lang.String.format;
import static org.apache.camel.component.zookeeper.ZooKeeperUtils.getAclListFromMessage;
import static org.apache.camel.component.zookeeper.ZooKeeperUtils.getCreateMode;
import static org.apache.camel.component.zookeeper.ZooKeeperUtils.getCreateModeFromString;
import static org.apache.camel.component.zookeeper.ZooKeeperUtils.getNodeFromMessage;
import static org.apache.camel.component.zookeeper.ZooKeeperUtils.getPayloadFromExchange;
import static org.apache.camel.component.zookeeper.ZooKeeperUtils.getVersionFromMessage;
/**
* <code>ZooKeeperProducer</code> attempts to set the content of nodes in the
* {@link ZooKeeper} cluster with the payloads of the of the exchanges it
* receives.
*/
@SuppressWarnings("rawtypes")
public class ZooKeeperProducer extends DefaultProducer {
public static final String ZK_OPERATION_WRITE = "WRITE";
public static final String ZK_OPERATION_DELETE = "DELETE";
private static final Logger LOG = LoggerFactory.getLogger(ZooKeeperProducer.class);
private final ZooKeeperConfiguration configuration;
private ZooKeeperConnectionManager zkm;
private ZooKeeper connection;
public ZooKeeperProducer(ZooKeeperEndpoint endpoint) {
super(endpoint);
this.configuration = endpoint.getConfiguration();
this.zkm = endpoint.getConnectionManager();
}
@Override
public void process(Exchange exchange) throws Exception {
if (connection == null) {
connection = this.zkm.getConnection();
}
ProductionContext context = new ProductionContext(connection, exchange);
String operation = exchange.getIn().getHeader(ZooKeeperMessage.ZOOKEEPER_OPERATION, String.class);
boolean isDelete = ZK_OPERATION_DELETE.equals(operation);
if (ExchangeHelper.isOutCapable(exchange)) {
if (isDelete) {
if (LOG.isDebugEnabled()) {
LOG.debug(format("Deleting znode '%s', waiting for confirmation", context.node));
}
OperationResult result = synchronouslyDelete(context);
if (configuration.isListChildren()) {
result = listChildren(context);
}
updateExchangeWithResult(context, result);
} else {
if (LOG.isDebugEnabled()) {
LOG.debug(format("Storing data to znode '%s', waiting for confirmation", context.node));
}
OperationResult result = synchronouslySetData(context);
if (configuration.isListChildren()) {
result = listChildren(context);
}
updateExchangeWithResult(context, result);
}
} else {
if (isDelete) {
asynchronouslyDeleteNode(connection, context);
} else {
asynchronouslySetDataOnNode(connection, context);
}
}
}
@Override
protected void doStart() throws Exception {
connection = zkm.getConnection();
if (LOG.isTraceEnabled()) {
LOG.trace(String.format("Starting zookeeper producer of '%s'", configuration.getPath()));
}
}
@Override
protected void doStop() throws Exception {
super.doStop();
if (LOG.isTraceEnabled()) {
LOG.trace(String.format("Shutting down zookeeper producer of '%s'", configuration.getPath()));
}
zkm.shutdown();
}
private void asynchronouslyDeleteNode(ZooKeeper connection, ProductionContext context) {
if (LOG.isDebugEnabled()) {
LOG.debug(format("Deleting node '%s', not waiting for confirmation", context.node));
}
connection.delete(context.node, context.version, new AsyncDeleteCallback(), context);
}
private void asynchronouslySetDataOnNode(ZooKeeper connection, ProductionContext context) {
if (LOG.isDebugEnabled()) {
LOG.debug(format("Storing data to node '%s', not waiting for confirmation", context.node));
}
connection.setData(context.node, context.payload, context.version, new AsyncSetDataCallback(), context);
}
private void updateExchangeWithResult(ProductionContext context, OperationResult result) {
ZooKeeperMessage out = new ZooKeeperMessage(getEndpoint().getCamelContext(), context.node, result.getStatistics(), context.in.getHeaders());
if (result.isOk()) {
out.setBody(result.getResult());
} else {
context.exchange.setException(result.getException());
}
context.exchange.setOut(out);
}
private OperationResult listChildren(ProductionContext context) throws Exception {
return new GetChildrenOperation(context.connection, configuration.getPath()).get();
}
/** Simple container to avoid passing all these around as parameters */
private class ProductionContext {
ZooKeeper connection;
Exchange exchange;
Message in;
byte[] payload;
int version;
String node;
ProductionContext(ZooKeeper connection, Exchange exchange) {
this.connection = connection;
this.exchange = exchange;
this.in = exchange.getIn();
this.node = getNodeFromMessage(in, configuration.getPath());
this.version = getVersionFromMessage(in);
this.payload = getPayloadFromExchange(exchange);
}
}
private class AsyncSetDataCallback implements StatCallback {
@Override
public void processResult(int rc, String node, Object ctx, Stat statistics) {
if (Code.NONODE.equals(Code.get(rc))) {
if (configuration.isCreate()) {
LOG.warn(format("Node '%s' did not exist, creating it...", node));
ProductionContext context = (ProductionContext)ctx;
OperationResult<String> result = null;
try {
result = createNode(context);
} catch (Exception e) {
LOG.error(format("Error trying to create node '%s'", node), e);
}
if (result == null || !result.isOk()) {
LOG.error(format("Error creating node '%s'", node), result.getException());
}
}
} else {
logStoreComplete(node, statistics);
}
}
}
private class AsyncDeleteCallback implements VoidCallback {
@Override
public void processResult(int rc, String path, Object ctx) {
if (LOG.isDebugEnabled()) {
if (LOG.isTraceEnabled()) {
LOG.trace(format("Removed data node '%s'", path));
} else {
LOG.debug(format("Removed data node '%s'", path));
}
}
}
}
private OperationResult<String> createNode(ProductionContext ctx) throws Exception {
CreateOperation create = new CreateOperation(ctx.connection, ctx.node);
create.setPermissions(getAclListFromMessage(ctx.exchange.getIn()));
CreateMode mode = null;
String modeString = configuration.getCreateMode();
if (modeString != null) {
try {
mode = getCreateModeFromString(modeString, CreateMode.EPHEMERAL);
} catch (Exception e) { }
} else {
mode = getCreateMode(ctx.exchange.getIn(), CreateMode.EPHEMERAL);
}
create.setCreateMode(mode == null ? CreateMode.EPHEMERAL : mode);
create.setData(ctx.payload);
return create.get();
}
/**
* Tries to set the data first and if a no node error is received then an
* attempt will be made to create it instead.
*/
private OperationResult synchronouslySetData(ProductionContext ctx) throws Exception {
SetDataOperation setData = new SetDataOperation(ctx.connection, ctx.node, ctx.payload);
setData.setVersion(ctx.version);
OperationResult result = setData.get();
if (!result.isOk() && configuration.isCreate() && result.failedDueTo(Code.NONODE)) {
LOG.warn(format("Node '%s' did not exist, creating it.", ctx.node));
result = createNode(ctx);
}
return result;
}
private OperationResult synchronouslyDelete(ProductionContext ctx) throws Exception {
DeleteOperation setData = new DeleteOperation(ctx.connection, ctx.node);
setData.setVersion(ctx.version);
OperationResult result = setData.get();
if (!result.isOk() && configuration.isCreate() && result.failedDueTo(Code.NONODE)) {
LOG.warn(format("Node '%s' did not exist, creating it.", ctx.node));
result = createNode(ctx);
}
return result;
}
private void logStoreComplete(String path, Stat statistics) {
if (LOG.isDebugEnabled()) {
if (LOG.isTraceEnabled()) {
LOG.trace(format("Stored data to node '%s', and receive statistics %s", path, statistics));
} else {
LOG.debug(format("Stored data to node '%s'", path));
}
}
}
}
| |
/*
* Copyright (c) 2009-2016 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme3.gde.core.sceneexplorer.nodes;
import com.jme3.cinematic.MotionPath;
import com.jme3.gde.core.icons.IconList;
import com.jme3.gde.core.scene.SceneApplication;
import com.jme3.gde.core.sceneexplorer.nodes.actions.MotionPathPopup;
import com.jme3.material.Material;
import com.jme3.math.ColorRGBA;
import com.jme3.math.Spline;
import com.jme3.scene.Geometry;
import com.jme3.scene.Spatial;
import com.jme3.scene.shape.Curve;
import java.awt.Image;
import java.io.IOException;
import java.util.concurrent.Callable;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.Action;
import org.openide.actions.DeleteAction;
import org.openide.actions.PropertiesAction;
import org.openide.loaders.DataObject;
import org.openide.nodes.Node;
import org.openide.nodes.Sheet;
import org.openide.util.actions.SystemAction;
/**
* This Class actually represents the MotionPath in the SceneComposer.<br>
* It is added and managed by {@link JmeMotionPathChildren }
*
* @author MeFisto94
*/
@org.openide.util.lookup.ServiceProvider(service = SceneExplorerNode.class)
@SuppressWarnings({"unchecked", "rawtypes", "OverridableMethodCallInConstructor", "LeakingThisInConstructor"})
public class JmeMotionPath extends AbstractSceneExplorerNode {

    /** Icon shown for this node in the SceneExplorer tree. */
    private static final Image smallImage = IconList.chimpSmile.getImage();
    /** The wrapped engine object this node represents. */
    private MotionPath motionPath;
    /** Owning MotionEvent node; flagged as modified when the path is edited. */
    private JmeMotionEvent motionEvent;
    /** Half-extent of the per-waypoint debug boxes (box size is 2 * extents). */
    private float debugBoxExtents = 0.5f;
    /** Geometry currently visualizing the spline (null until first updateSpline). */
    private Spatial spatial;

    /** No-arg constructor required by the {@code ServiceProvider} registration. */
    public JmeMotionPath() {
    }

    /**
     * Creates the explorer node for the given path and immediately builds the
     * visual spline geometry.
     *
     * @param motionPath the engine MotionPath to represent
     * @param parent the MotionEvent node owning this path
     * @param children the waypoint children container (linked back to this node)
     */
    public JmeMotionPath(MotionPath motionPath, JmeMotionEvent parent, JmeVector3fChildren children) {
        super(children);
        this.motionPath = motionPath;
        getLookupContents().add(motionPath);
        getLookupContents().add(this);
        getLookupContents().add(children);
        super.setName("MotionPath");
        super.setDisplayName("Motion Path");
        children.setJmeMotionPath(this);
        motionEvent = parent;
        updateSpline(false);
    }

    //<editor-fold desc="Some Overrides for the Node">
    @Override
    public Image getIcon(int type) {
        return smallImage;
    }

    @Override
    public Image getOpenedIcon(int type) {
        return smallImage;
    }

    @Override
    public Action[] getActions(boolean context) {
        MotionPathPopup m = new MotionPathPopup(this);
        return new Action[]{
            m.getAddAction(),
            m,
            SystemAction.get(PropertiesAction.class),
            SystemAction.get(DeleteAction.class)
        };
    }
    //</editor-fold>

    /**
     * Builds the two property sheets: the MotionPath's own properties and the
     * SDK-only debug settings.
     */
    @Override
    protected Sheet createSheet() {
        Sheet sheet = super.createSheet();
        Sheet.Set set = Sheet.createPropertiesSet();
        set.setDisplayName("Motion Path");
        set.setName(MotionPath.class.getName());
        set.setShortDescription("These are the Properties of the Motion Event's Motion Path");
        // Without a wrapped path there is nothing to expose.
        if (motionPath == null) {
            return sheet;
        }
        Property<?> prop = makeEmbedProperty(this, getExplorerNodeClass(), motionPath.getPathSplineType().getClass(), "getPathSplineType", "setPathSplineType", "PathSplineType");
        prop.setShortDescription("Sets the Type of the Paths' Spline. This will define how the single waypoints are interpolated (linear, curvy)");
        set.put(prop);
        prop = makeEmbedProperty(this, getExplorerNodeClass(), float.class, "getCurveTension", "setCurveTension", "Curve Tension");
        prop.setShortDescription("Sets the Curves' Tension. This defines how \"Curvy\" a curve will be. A tension of 0 would be completely linear.");
        set.put(prop);
        prop = makeProperty(motionPath, boolean.class, "isCycle", "setCycle", "Cycle?");
        prop.setShortDescription("Should the Path be a Cycle? This essentially means it will be looped. (Starting from the beginning after we're finished)");
        set.put(prop);
        prop = makeProperty(motionPath, int.class, "getLength", null, "Path Length");
        prop.setShortDescription("This is the total length this path has");
        set.put(prop);
        prop = makeEmbedProperty(motionPath, motionPath.getClass(), int.class, "getNbWayPoints", null, "Number of Waypoints");
        prop.setShortDescription("Shows the Number of Waypoints this Path consists of");
        set.put(prop);
        sheet.put(set);
        set = Sheet.createPropertiesSet();
        set.setDisplayName("Motion Path SDK");
        set.setName("MotionPathSDK");
        set.setShortDescription("These are SDK-dependent Settings which have nothing to do with MotionEvent or MotionPath in the first place.");
        prop = makeEmbedProperty(this, JmeMotionPath.class, float.class, "getDebugBoxExtents", "setDebugBoxExtents", "DebugBox Extents");
        prop.setShortDescription("The DebugBox Extents defines how big the Debug Boxes (i.e. the Boxes you see for each Waypoint) are. Note: The BoxSize is 2 * extents");
        set.put(prop);
        sheet.put(set);
        return sheet;
    }

    public MotionPath getMotionPath() {
        return motionPath;
    }

    public JmeMotionEvent getMotionEvent() {
        return motionEvent;
    }

    //<editor-fold desc="Properties Getter/Setter">
    public float getDebugBoxExtents() {
        return debugBoxExtents;
    }

    /**
     * Sets the debug box half-extent and resizes the boxes of all waypoint
     * children; non-waypoint children are only logged.
     */
    public void setDebugBoxExtents(float extents) {
        debugBoxExtents = extents;
        if (getChildren() != null) {
            for (Node n : getChildren().getNodes()) {
                if (n instanceof JmeVector3f) {
                    ((JmeVector3f) n).updateBox();
                } else {
                    Logger.getLogger(JmeMotionPath.class.getName()).log(Level.WARNING, "JmeMotionPath has some unknown Children...");
                }
            }
        }
    }

    public Spline.SplineType getPathSplineType() {
        return motionPath.getPathSplineType();
    }

    /**
     * Sets the spline type and rebuilds the visual curve. Nurb and Bezier are
     * rejected (unsupported/buggy here) and silently replaced by CatmullRom.
     */
    public void setPathSplineType(Spline.SplineType sType) {
        if (sType == Spline.SplineType.Nurb) {
            Logger.getLogger(JmeMotionPath.class.getName()).log(Level.SEVERE, "Nurb Curves aren't possible at the moment (they require additional helper points). Reverting to Catmull..");
            setPathSplineType(Spline.SplineType.CatmullRom);
            return;
        } else if (sType == Spline.SplineType.Bezier) {
            Logger.getLogger(JmeMotionPath.class.getName()).log(Level.SEVERE, "Bezier Curves are bugged and crash the SDK. Reverting to Catmull..");
            setPathSplineType(Spline.SplineType.CatmullRom);
            return;
        }
        motionPath.setPathSplineType(sType);
        updateSpline(true);
    }

    public float getCurveTension() {
        return motionPath.getCurveTension();
    }

    public void setCurveTension(float f) {
        motionPath.setCurveTension(f);
        updateSpline(true);
    }
    //</editor-fold>

    @Override
    public Class getExplorerObjectClass() {
        return MotionPath.class;
    }

    @Override
    public Class getExplorerNodeClass() {
        return JmeMotionPath.class;
    }

    @Override
    public org.openide.nodes.Node[] createNodes(Object key, DataObject key2, boolean cookie) {
        return null;
    }

    /** Re-reads the waypoints into the children and rebuilds the visual spline. */
    public void refreshChildren() {
        ((JmeVector3fChildren) this.jmeChildren).refreshChildren(true);
        updateSpline(false);
    }

    @Override
    public void destroy() throws IOException {
        // Guard the cast like setDebugBoxExtents does, instead of risking a CCE
        // on an unexpected child type.
        for (Node n : getChildren().getNodes()) {
            if (n instanceof JmeVector3f) {
                ((JmeVector3f) n).destroy();
            }
        }
        super.destroy();
        ((AbstractSceneExplorerNode) getParentNode()).refresh(true);
    }

    /** Attaches the per-waypoint debug boxes and rebuilds the spline geometry. */
    public void enableDebugShapes() {
        for (Node n : getChildren().getNodes()) {
            if (n instanceof JmeVector3f) {
                ((JmeVector3f) n).attachBox(((JmeVector3f) n).spatial, this);
            }
        }
        updateSpline(false);
    }

    /** Detaches the per-waypoint debug boxes and removes the spline geometry. */
    public void disableDebugShapes() {
        for (Node n : getChildren().getNodes()) {
            if (n instanceof JmeVector3f) {
                ((JmeVector3f) n).detachBox(((JmeVector3f) n).spatial);
            }
        }
        removeCurveFromScene();
    }

    /**
     * Call this to update the visual Spline.
     *
     * @param wasModified If the Spatial was Modified and hence the dirty-safe
     * flag should be triggered (only false for the Constructors first
     * initiation)
     */
    public void updateSpline(boolean wasModified) {
        removeCurveFromScene();
        Material m = new Material(SceneApplication.getApplication().getAssetManager(), "Common/MatDefs/Misc/Unshaded.j3md");
        m.setColor("Color", ColorRGBA.Red);
        m.getAdditionalRenderState().setLineWidth(4f);
        // Linear paths need no sub-segments; every other spline type is
        // approximated with 10 (this collapses the formerly duplicated
        // CatmullRom and default switch branches).
        int nbSubSegments = motionPath.getPathSplineType() == Spline.SplineType.Linear ? 0 : 10;
        Geometry geo = new Geometry("Curve", new Curve(motionPath.getSpline(), nbSubSegments));
        geo.setMaterial(m);
        spatial = geo;
        final Spatial spat = spatial;
        SceneApplication.getApplication().enqueue(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                SceneApplication.getApplication().getRootNode().attachChild(spat);
                return null;
            }
        });
        if (wasModified) {
            motionEvent.setModified(true);
        }
    }

    /** Enqueues removal of the current curve geometry from the scene, if any. */
    private void removeCurveFromScene() {
        if (spatial != null) {
            final Spatial spat = spatial;
            SceneApplication.getApplication().enqueue(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    spat.removeFromParent();
                    return null;
                }
            });
        }
    }
}
| |
package org.bsworks.x2.util.sql.dialect;
import java.util.List;
import java.util.Map;
import org.bsworks.x2.resource.RangeSpec;
import org.bsworks.x2.services.persistence.ParameterValue;
import org.bsworks.x2.services.persistence.ParameterValuesFactory;
import org.bsworks.x2.services.persistence.PersistentValueType;
/**
* SQL dialect implementation for <i>PostgreSQL</i>.
*
* @author Lev Himmelfarb
*/
class PostgreSQLDialect
    implements SQLDialect {

    /**
     * PostgreSQL temporary tables are transactional objects, so read-write
     * access is required to create them.
     */
    @Override
    public boolean tempTablesRequireReadWrite() {
        return true;
    }

    /** Quotes a column label, doubling any embedded double quotes. */
    @Override
    public String quoteColumnLabel(final String colLabel) {
        final String escaped = colLabel.replace("\"", "\"\"");
        return '"' + escaped + '"';
    }

    /** Concatenates a literal prefix with an expression using PostgreSQL's {@code ||}. */
    @Override
    public String nullableConcat(final String stringLiteral,
            final String selectExpr) {
        final String escapedLiteral = stringLiteral.replace("'", "''");
        return "'" + escapedLiteral + "' || " + selectExpr;
    }

    /** Casts any expression to a string via {@code CAST(... AS VARCHAR)}. */
    @Override
    public String castToString(final String valExpr) {
        return String.format("CAST(%s AS VARCHAR)", valExpr);
    }

    /** String length via {@code LENGTH}. */
    @Override
    public String stringLength(final String valExpr) {
        return String.format("LENGTH(%s)", valExpr);
    }

    /** Lower-casing via {@code LOWER}. */
    @Override
    public String stringLowercase(final String valExpr) {
        return String.format("LOWER(%s)", valExpr);
    }

    /** Left-padding via {@code LPAD}, escaping a single-quote padding char. */
    @Override
    public String stringLeftPad(final String valExpr, final int width,
            final char paddingChar) {
        final String padLiteral =
            (paddingChar == '\'') ? "''" : String.valueOf(paddingChar);
        return "LPAD(" + valExpr + ", " + width + ", '" + padLiteral + "')";
    }

    /**
     * Substring via {@code SUBSTRING(... FROM ... [FOR ...])}; {@code from} is
     * zero-based here but one-based in SQL, hence the {@code + 1}.
     */
    @Override
    public String stringSubstring(final String valExpr, final int from,
            final int length) {
        final StringBuilder sql = new StringBuilder("SUBSTRING(");
        sql.append(valExpr).append(" FROM ").append(from + 1);
        if (length > 0)
            sql.append(" FOR ").append(length);
        sql.append(")");
        return sql.toString();
    }

    /**
     * Regular expression match using PostgreSQL's {@code ~} family of
     * operators, choosing the negated/case-insensitive variant as requested.
     */
    @Override
    public String regularExpressionMatch(final String valExpr,
            final String reExpr, final boolean negate,
            final boolean caseSensitive) {
        final String op = negate
            ? (caseSensitive ? " !~ " : " !~* ")
            : (caseSensitive ? " ~ " : " ~* ");
        return valExpr + op + reExpr;
    }

    /**
     * Substring containment via {@code [NOT] [I]LIKE '%...%'}. The pattern
     * expression has LIKE metacharacters escaped with a regexp replace.
     */
    @Override
    public String substringMatch(final String valExpr,
            final String substringExpr, final boolean negate,
            final boolean caseSensitive) {
        final String patternExpr =
            "REGEXP_REPLACE(" + substringExpr
                + ", '([%_\\\\])', '\\\\\\1', 'g')";
        final String op =
            (negate ? " NOT" : "") + (caseSensitive ? " LIKE " : " ILIKE ");
        return valExpr + op + "'%' || " + patternExpr + " || '%'";
    }

    /**
     * Prefix match via {@code [NOT] [I]LIKE '...%'}. The pattern expression
     * has LIKE metacharacters escaped with a regexp replace.
     */
    @Override
    public String prefixMatch(final String valExpr, final String prefixExpr,
            final boolean negate, final boolean caseSensitive) {
        final String patternExpr =
            "REGEXP_REPLACE(" + prefixExpr + ", '([%_\\\\])', '\\\\\\1', 'g')";
        final String op =
            (negate ? " NOT" : "") + (caseSensitive ? " LIKE " : " ILIKE ");
        return valExpr + op + patternExpr + " || '%'";
    }

    /**
     * Appends {@code LIMIT}/{@code OFFSET} clauses with named parameters and
     * registers the corresponding parameter values. {@code OFFSET} is only
     * emitted when the range does not start at the first record.
     */
    @Override
    public <V extends ParameterValue> String makeRangedSelect(
            final String selectQuery, final RangeSpec range,
            final ParameterValuesFactory<V> paramsFactory,
            final Map<String, V> params) {

        final StringBuilder sql = new StringBuilder(selectQuery);

        sql.append(" LIMIT ?").append(RANGE_LIMIT_PARAM);
        params.put(RANGE_LIMIT_PARAM,
                paramsFactory.getParameterValue(PersistentValueType.NUMERIC,
                        Integer.valueOf(range.getMaxRecords())));

        if (range.getFirstRecord() > 0) {
            sql.append(" OFFSET ?").append(RANGE_OFFSET_PARAM);
            params.put(RANGE_OFFSET_PARAM,
                    paramsFactory.getParameterValue(
                            PersistentValueType.NUMERIC,
                            Integer.valueOf(range.getFirstRecord())));
        }

        return sql.toString();
    }

    /**
     * Materializes a select into a temporary table: {@code CREATE TEMPORARY
     * TABLE ... ON COMMIT DROP AS} on first use, truncate-and-insert on reuse.
     */
    @Override
    public void makeSelectIntoTempTable(final String tempTableName,
            final boolean create, final String selectQuery,
            final List<String> preStatements,
            final List<String> postStatements) {

        if (create) {
            preStatements.add("CREATE TEMPORARY TABLE " + tempTableName
                    + " ON COMMIT DROP AS " + selectQuery);
            return;
        }
        preStatements.add("TRUNCATE TABLE " + tempTableName);
        preStatements.add("INSERT INTO " + tempTableName + " "
                + selectQuery);
    }

    /** Shared row lock via {@code FOR SHARE [OF table, ...]}. */
    @Override
    public String makeSelectWithShareLock(final String selectQuery,
            final String... lockTables) {
        return appendRowLock(selectQuery, " FOR SHARE", lockTables);
    }

    /** Exclusive row lock via {@code FOR UPDATE [OF table, ...]}. */
    @Override
    public String makeSelectWithExclusiveLock(final String selectQuery,
            final String... lockTables) {
        return appendRowLock(selectQuery, " FOR UPDATE", lockTables);
    }

    /**
     * Appends a row-lock clause, optionally restricted to the given tables.
     *
     * @param selectQuery the base select
     * @param lockClause either {@code " FOR SHARE"} or {@code " FOR UPDATE"}
     * @param lockTables tables to lock, or empty for all
     */
    private static String appendRowLock(final String selectQuery,
            final String lockClause, final String[] lockTables) {
        final StringBuilder sql = new StringBuilder(selectQuery.length() + 128);
        sql.append(selectQuery).append(lockClause);
        if (lockTables.length > 0) {
            sql.append(" OF ");
            sql.append(commaJoin(lockTables));
        }
        return sql.toString();
    }

    /** Joins names with {@code ", "}. */
    private static String commaJoin(final String[] names) {
        final StringBuilder sb = new StringBuilder(256);
        String sep = "";
        for (final String name : names) {
            sb.append(sep).append(name);
            sep = ", ";
        }
        return sb.toString();
    }

    /** PostgreSQL supports {@code DELETE FROM table AS alias} directly. */
    @Override
    public String createDeleteFromAliasedTable(final String fromTable,
            final String fromTableAlias, final String whereClause) {
        final String whereSuffix =
            (whereClause == null) ? "" : " WHERE " + whereClause;
        return "DELETE FROM " + fromTable + " AS " + fromTableAlias
            + whereSuffix;
    }

    /** Delete with joins expressed through PostgreSQL's {@code USING} clause. */
    @Override
    public String createDeleteWithJoins(final String fromTable,
            final String fromTableAlias, final String joinedTables,
            final String joinConditions, final String whereClause) {
        final String whereSuffix =
            (whereClause == null) ? "" : " AND (" + whereClause + ")";
        return "DELETE FROM " + fromTable + " AS " + fromTableAlias
            + " USING " + joinedTables + " WHERE " + joinConditions
            + whereSuffix;
    }

    /** Update with joins expressed through PostgreSQL's {@code FROM} clause. */
    @Override
    public String createUpdateWithJoins(final String table,
            final String tableAlias, final String setClause,
            final String joinedTables, final String joinConditions,
            final String whereClause) {
        final String whereSuffix =
            (whereClause == null) ? "" : " AND (" + whereClause + ")";
        return "UPDATE " + table + " AS " + tableAlias + " SET " + setClause
            + " FROM " + joinedTables + " WHERE " + joinConditions
            + whereSuffix;
    }

    /** Table-level share lock via {@code LOCK TABLE ... IN SHARE MODE}. */
    @Override
    public String lockTablesInShareMode(final String... tables) {
        return "LOCK TABLE " + commaJoin(tables) + " IN SHARE MODE";
    }

    /** Table-level exclusive lock via {@code LOCK TABLE ... IN EXCLUSIVE MODE}. */
    @Override
    public String lockTablesInExclusiveMode(final String... tables) {
        return "LOCK TABLE " + commaJoin(tables) + " IN EXCLUSIVE MODE";
    }

    /**
     * Returns {@code null}: no explicit unlock statement is produced —
     * presumably table locks are released at transaction end in PostgreSQL
     * (NOTE(review): confirm against SQLDialect's contract).
     */
    @Override
    public String unlockTables(final String... tables) {
        return null;
    }
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.trans.steps.clonerow;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.events.FocusListener;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.graphics.Cursor;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.trans.steps.clonerow.CloneRowMeta;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.core.widget.TextVar;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
public class CloneRowDialog extends BaseStepDialog implements StepDialogInterface {
private static Class<?> PKG = CloneRowMeta.class; // for i18n purposes, needed by Translator2!!
// Step metadata this dialog edits; written back on OK.
private CloneRowMeta input;
// nr clones
private Label wlnrClone;
private TextVar wnrClone;
private FormData fdlnrClone, fdnrClone;
// Labels for the clone-flag field, the "add clone num" option and the clone-num field.
private Label wlcloneFlagField, wladdCloneNum, wlCloneNumField;
private TextVar wcloneFlagField, wCloneNumField;
private FormData fdlcloneFlagField, fdcloneFlagField, fdladdCloneNum, fdCloneNumField;
// "Output fields" group box and its layout data.
private FormData fdOutpuFields;
private Group wOutpuFields;
private Label wladdCloneFlag;
private Button waddCloneFlag, waddCloneNum;
private FormData fdladdCloneFlag, fdaddCloneFlag, fdaddCloneNum;
// Widgets for taking the number of clones from an input field instead of a constant.
private Label wlisNrCloneInField, wlNrCloneField;
private CCombo wNrCloneField;
private FormData fdlisNrCloneInField, fdisNrCloneInField;
private FormData fdlNrCloneField, fdNrCloneField;
private Button wisNrCloneInField;
// Guards the lazy, one-time population of the field combo from previous-step fields.
private boolean gotPreviousFields = false;
/**
 * Creates the dialog for the Clone Row step.
 *
 * @param parent the parent SWT shell
 * @param in the step metadata (must be a {@link CloneRowMeta})
 * @param tr the owning transformation's metadata
 * @param sname the step name
 */
public CloneRowDialog( Shell parent, Object in, TransMeta tr, String sname ) {
super( parent, (BaseStepMeta) in, tr, sname );
input = (CloneRowMeta) in;
}
/**
 * Builds and lays out the dialog, opens it, and blocks in the SWT event loop until it is closed.
 *
 * @return the step name entered by the user, or null when the dialog was cancelled
 */
public String open() {
  Shell parent = getParent();
  Display display = parent.getDisplay();
  shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MIN | SWT.MAX );
  props.setLook( shell );
  setShellImage( shell, input );
  // Any edit in a text widget marks the step metadata as changed.
  ModifyListener lsMod = new ModifyListener() {
    public void modifyText( ModifyEvent e ) {
      input.setChanged();
    }
  };
  // Remember the original changed flag so cancel() can restore it.
  changed = input.hasChanged();
  FormLayout formLayout = new FormLayout();
  formLayout.marginWidth = Const.FORM_MARGIN;
  formLayout.marginHeight = Const.FORM_MARGIN;
  shell.setLayout( formLayout );
  shell.setText( BaseMessages.getString( PKG, "CloneRowDialog.Shell.Title" ) );
  int middle = props.getMiddlePct();
  int margin = Const.MARGIN;
  // Stepname line
  wlStepname = new Label( shell, SWT.RIGHT );
  wlStepname.setText( BaseMessages.getString( PKG, "CloneRowDialog.Stepname.Label" ) );
  props.setLook( wlStepname );
  fdlStepname = new FormData();
  fdlStepname.left = new FormAttachment( 0, 0 );
  fdlStepname.right = new FormAttachment( middle, -margin );
  fdlStepname.top = new FormAttachment( 0, margin );
  wlStepname.setLayoutData( fdlStepname );
  wStepname = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
  wStepname.setText( stepname );
  props.setLook( wStepname );
  wStepname.addModifyListener( lsMod );
  fdStepname = new FormData();
  fdStepname.left = new FormAttachment( middle, 0 );
  fdStepname.top = new FormAttachment( 0, margin );
  fdStepname.right = new FormAttachment( 100, 0 );
  wStepname.setLayoutData( fdStepname );
  // Number of clones line (static value, variable-aware)
  wlnrClone = new Label( shell, SWT.RIGHT );
  wlnrClone.setText( BaseMessages.getString( PKG, "CloneRowDialog.nrClone.Label" ) );
  props.setLook( wlnrClone );
  fdlnrClone = new FormData();
  fdlnrClone.left = new FormAttachment( 0, 0 );
  fdlnrClone.right = new FormAttachment( middle, -margin );
  fdlnrClone.top = new FormAttachment( wStepname, margin * 2 );
  wlnrClone.setLayoutData( fdlnrClone );
  wnrClone = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
  props.setLook( wnrClone );
  wnrClone.setToolTipText( BaseMessages.getString( PKG, "CloneRowDialog.nrClone.Tooltip" ) );
  wnrClone.addModifyListener( lsMod );
  fdnrClone = new FormData();
  fdnrClone.left = new FormAttachment( middle, 0 );
  fdnrClone.top = new FormAttachment( wStepname, margin * 2 );
  fdnrClone.right = new FormAttachment( 100, 0 );
  wnrClone.setLayoutData( fdnrClone );
  // Is Nr clones defined in a Field
  wlisNrCloneInField = new Label( shell, SWT.RIGHT );
  wlisNrCloneInField.setText( BaseMessages.getString( PKG, "CloneRowDialog.isNrCloneInField.Label" ) );
  props.setLook( wlisNrCloneInField );
  fdlisNrCloneInField = new FormData();
  fdlisNrCloneInField.left = new FormAttachment( 0, 0 );
  fdlisNrCloneInField.top = new FormAttachment( wnrClone, margin );
  fdlisNrCloneInField.right = new FormAttachment( middle, -margin );
  wlisNrCloneInField.setLayoutData( fdlisNrCloneInField );
  wisNrCloneInField = new Button( shell, SWT.CHECK );
  props.setLook( wisNrCloneInField );
  wisNrCloneInField.setToolTipText( BaseMessages.getString( PKG, "CloneRowDialog.isNrCloneInField.Tooltip" ) );
  fdisNrCloneInField = new FormData();
  fdisNrCloneInField.left = new FormAttachment( middle, 0 );
  fdisNrCloneInField.top = new FormAttachment( wnrClone, margin );
  wisNrCloneInField.setLayoutData( fdisNrCloneInField );
  // Toggling the checkbox switches between the static value and the field combo.
  SelectionAdapter lisNrCloneInField = new SelectionAdapter() {
    public void widgetSelected( SelectionEvent arg0 ) {
      ActiveisNrCloneInField();
      input.setChanged();
    }
  };
  wisNrCloneInField.addSelectionListener( lisNrCloneInField );
  // Nr-clone field name line (combo of incoming field names)
  wlNrCloneField = new Label( shell, SWT.RIGHT );
  wlNrCloneField.setText( BaseMessages.getString( PKG, "CloneRowDialog.wlNrCloneField.Label" ) );
  props.setLook( wlNrCloneField );
  fdlNrCloneField = new FormData();
  fdlNrCloneField.left = new FormAttachment( 0, 0 );
  fdlNrCloneField.top = new FormAttachment( wisNrCloneInField, margin );
  fdlNrCloneField.right = new FormAttachment( middle, -margin );
  wlNrCloneField.setLayoutData( fdlNrCloneField );
  // NOTE(review): the READ_ONLY style combined with setEditable(true) is contradictory —
  // confirm whether the combo is meant to accept free text or only listed fields.
  wNrCloneField = new CCombo( shell, SWT.BORDER | SWT.READ_ONLY );
  wNrCloneField.setEditable( true );
  props.setLook( wNrCloneField );
  wNrCloneField.addModifyListener( lsMod );
  fdNrCloneField = new FormData();
  fdNrCloneField.left = new FormAttachment( middle, 0 );
  fdNrCloneField.top = new FormAttachment( wisNrCloneInField, margin );
  fdNrCloneField.right = new FormAttachment( 100, 0 );
  wNrCloneField.setLayoutData( fdNrCloneField );
  // Lazily load the previous step's field names when the combo first gains focus;
  // show a busy cursor while the (potentially slow) lookup runs.
  wNrCloneField.addFocusListener( new FocusListener() {
    public void focusLost( org.eclipse.swt.events.FocusEvent e ) {
    }
    public void focusGained( org.eclipse.swt.events.FocusEvent e ) {
      Cursor busy = new Cursor( shell.getDisplay(), SWT.CURSOR_WAIT );
      shell.setCursor( busy );
      setisNrCloneInField();
      shell.setCursor( null );
      busy.dispose();
    }
  } );
  // ///////////////////////////////
  // START OF Output fields GROUP //
  // ///////////////////////////////
  wOutpuFields = new Group( shell, SWT.SHADOW_NONE );
  props.setLook( wOutpuFields );
  wOutpuFields.setText( BaseMessages.getString( PKG, "CloneRowDialog.wOutpuFields.Label" ) );
  FormLayout OutpuFieldsgroupLayout = new FormLayout();
  OutpuFieldsgroupLayout.marginWidth = 10;
  OutpuFieldsgroupLayout.marginHeight = 10;
  wOutpuFields.setLayout( OutpuFieldsgroupLayout );
  // add clone flag?
  wladdCloneFlag = new Label( wOutpuFields, SWT.RIGHT );
  wladdCloneFlag.setText( BaseMessages.getString( PKG, "CloneRowDialog.addCloneFlag.Label" ) );
  props.setLook( wladdCloneFlag );
  fdladdCloneFlag = new FormData();
  fdladdCloneFlag.left = new FormAttachment( 0, 0 );
  fdladdCloneFlag.top = new FormAttachment( wNrCloneField, 2 * margin );
  fdladdCloneFlag.right = new FormAttachment( middle, -margin );
  wladdCloneFlag.setLayoutData( fdladdCloneFlag );
  waddCloneFlag = new Button( wOutpuFields, SWT.CHECK );
  waddCloneFlag.setToolTipText( BaseMessages.getString( PKG, "CloneRowDialog.addCloneFlag.Tooltip" ) );
  props.setLook( waddCloneFlag );
  fdaddCloneFlag = new FormData();
  fdaddCloneFlag.left = new FormAttachment( middle, 0 );
  fdaddCloneFlag.top = new FormAttachment( wNrCloneField, 2 * margin );
  fdaddCloneFlag.right = new FormAttachment( 100, 0 );
  waddCloneFlag.setLayoutData( fdaddCloneFlag );
  SelectionAdapter lsSelR = new SelectionAdapter() {
    public void widgetSelected( SelectionEvent arg0 ) {
      input.setChanged();
      activeaddCloneFlag();
    }
  };
  waddCloneFlag.addSelectionListener( lsSelR );
  // clone flag field line
  wlcloneFlagField = new Label( wOutpuFields, SWT.RIGHT );
  wlcloneFlagField.setText( BaseMessages.getString( PKG, "CloneRowDialog.cloneFlagField.Label" ) );
  props.setLook( wlcloneFlagField );
  fdlcloneFlagField = new FormData();
  fdlcloneFlagField.left = new FormAttachment( 0, 0 );
  fdlcloneFlagField.right = new FormAttachment( middle, -margin );
  fdlcloneFlagField.top = new FormAttachment( waddCloneFlag, margin * 2 );
  wlcloneFlagField.setLayoutData( fdlcloneFlagField );
  wcloneFlagField = new TextVar( transMeta, wOutpuFields, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
  props.setLook( wcloneFlagField );
  wcloneFlagField.setToolTipText( BaseMessages.getString( PKG, "CloneRowDialog.cloneFlagField.Tooltip" ) );
  wcloneFlagField.addModifyListener( lsMod );
  fdcloneFlagField = new FormData();
  fdcloneFlagField.left = new FormAttachment( middle, 0 );
  fdcloneFlagField.top = new FormAttachment( waddCloneFlag, margin * 2 );
  fdcloneFlagField.right = new FormAttachment( 100, 0 );
  wcloneFlagField.setLayoutData( fdcloneFlagField );
  // add clone num?
  wladdCloneNum = new Label( wOutpuFields, SWT.RIGHT );
  wladdCloneNum.setText( BaseMessages.getString( PKG, "CloneRowDialog.addCloneNum.Label" ) );
  props.setLook( wladdCloneNum );
  fdladdCloneNum = new FormData();
  fdladdCloneNum.left = new FormAttachment( 0, 0 );
  fdladdCloneNum.top = new FormAttachment( wcloneFlagField, margin );
  fdladdCloneNum.right = new FormAttachment( middle, -margin );
  wladdCloneNum.setLayoutData( fdladdCloneNum );
  waddCloneNum = new Button( wOutpuFields, SWT.CHECK );
  waddCloneNum.setToolTipText( BaseMessages.getString( PKG, "CloneRowDialog.addCloneNum.Tooltip" ) );
  props.setLook( waddCloneNum );
  fdaddCloneNum = new FormData();
  fdaddCloneNum.left = new FormAttachment( middle, 0 );
  fdaddCloneNum.top = new FormAttachment( wcloneFlagField, margin );
  fdaddCloneNum.right = new FormAttachment( 100, 0 );
  waddCloneNum.setLayoutData( fdaddCloneNum );
  waddCloneNum.addSelectionListener( new SelectionAdapter() {
    public void widgetSelected( SelectionEvent arg0 ) {
      input.setChanged();
      activeaddCloneNum();
    }
  } );
  // clone num field line
  wlCloneNumField = new Label( wOutpuFields, SWT.RIGHT );
  wlCloneNumField.setText( BaseMessages.getString( PKG, "CloneRowDialog.cloneNumField.Label" ) );
  props.setLook( wlCloneNumField );
  // NOTE(review): fdlcloneFlagField is re-assigned here instead of using a dedicated
  // fdlCloneNumField. It works because the earlier FormData was already applied via
  // setLayoutData, but a separate variable would be clearer.
  fdlcloneFlagField = new FormData();
  fdlcloneFlagField.left = new FormAttachment( 0, 0 );
  fdlcloneFlagField.right = new FormAttachment( middle, -margin );
  fdlcloneFlagField.top = new FormAttachment( waddCloneNum, margin );
  wlCloneNumField.setLayoutData( fdlcloneFlagField );
  wCloneNumField = new TextVar( transMeta, wOutpuFields, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
  props.setLook( wCloneNumField );
  wCloneNumField.setToolTipText( BaseMessages.getString( PKG, "CloneRowDialog.cloneNumField.Tooltip" ) );
  wCloneNumField.addModifyListener( lsMod );
  fdCloneNumField = new FormData();
  fdCloneNumField.left = new FormAttachment( middle, 0 );
  fdCloneNumField.top = new FormAttachment( waddCloneNum, margin );
  fdCloneNumField.right = new FormAttachment( 100, 0 );
  wCloneNumField.setLayoutData( fdCloneNumField );
  fdOutpuFields = new FormData();
  fdOutpuFields.left = new FormAttachment( 0, margin );
  fdOutpuFields.top = new FormAttachment( wNrCloneField, 2 * margin );
  fdOutpuFields.right = new FormAttachment( 100, -margin );
  wOutpuFields.setLayoutData( fdOutpuFields );
  // ///////////////////////////////////////////////////////////
  // / END OF Output fields GROUP
  // ///////////////////////////////////////////////////////////
  // Some buttons
  wOK = new Button( shell, SWT.PUSH );
  wOK.setText( BaseMessages.getString( PKG, "System.Button.OK" ) );
  wCancel = new Button( shell, SWT.PUSH );
  wCancel.setText( BaseMessages.getString( PKG, "System.Button.Cancel" ) );
  setButtonPositions( new Button[] { wOK, wCancel }, margin, wOutpuFields );
  // Add listeners
  lsCancel = new Listener() {
    public void handleEvent( Event e ) {
      cancel();
    }
  };
  lsOK = new Listener() {
    public void handleEvent( Event e ) {
      ok();
    }
  };
  wCancel.addListener( SWT.Selection, lsCancel );
  wOK.addListener( SWT.Selection, lsOK );
  // Pressing Enter in the step name field acts like OK.
  lsDef = new SelectionAdapter() {
    public void widgetDefaultSelected( SelectionEvent e ) {
      ok();
    }
  };
  wStepname.addSelectionListener( lsDef );
  // Detect X or ALT-F4 or something that kills this window...
  shell.addShellListener( new ShellAdapter() {
    public void shellClosed( ShellEvent e ) {
      cancel();
    }
  } );
  // Set the shell size, based upon previous time...
  setSize();
  // Populate widgets from the metadata and sync the enabled/disabled states.
  getData();
  activeaddCloneFlag();
  ActiveisNrCloneInField();
  activeaddCloneNum();
  input.setChanged( changed );
  shell.open();
  while ( !shell.isDisposed() ) {
    if ( !display.readAndDispatch() ) {
      display.sleep();
    }
  }
  return stepname;
}
/**
 * Lazily fills the "nr clone field" combo with the field names coming from the previous step.
 * The lookup runs at most once; subsequent calls are no-ops.
 */
private void setisNrCloneInField() {
  if ( gotPreviousFields ) {
    return;
  }
  try {
    // remember the current selection so it can be restored after repopulating
    String previousSelection = wNrCloneField.getText();
    wNrCloneField.removeAll();
    RowMetaInterface previousFields = transMeta.getPrevStepFields( stepname );
    if ( previousFields != null ) {
      wNrCloneField.setItems( previousFields.getFieldNames() );
    }
    if ( previousSelection != null ) {
      wNrCloneField.setText( previousSelection );
    }
  } catch ( KettleException ke ) {
    new ErrorDialog(
      shell, BaseMessages.getString( PKG, "CloneRowDialog.FailedToGetFields.DialogTitle" ), BaseMessages
        .getString( PKG, "CloneRowDialog.FailedToGetFields.DialogMessage" ), ke );
  }
  // Marked done even after a failed lookup so the error dialog is not shown on every focus event.
  gotPreviousFields = true;
}
/** Enables either the static "nr clones" input or the field combo, depending on the checkbox. */
private void ActiveisNrCloneInField() {
  boolean inField = wisNrCloneInField.getSelection();
  wlNrCloneField.setEnabled( inField );
  wNrCloneField.setEnabled( inField );
  wlnrClone.setEnabled( !inField );
  wnrClone.setEnabled( !inField );
}
/** Enables the clone-flag field name input only while the "add clone flag" checkbox is ticked. */
private void activeaddCloneFlag() {
  boolean addFlag = waddCloneFlag.getSelection();
  wlcloneFlagField.setEnabled( addFlag );
  wcloneFlagField.setEnabled( addFlag );
}
/** Enables the clone-num field name input only while the "add clone num" checkbox is ticked. */
private void activeaddCloneNum() {
  boolean addNum = waddCloneNum.getSelection();
  wlCloneNumField.setEnabled( addNum );
  wCloneNumField.setEnabled( addNum );
}
/**
 * Copies the step metadata into the dialog widgets. Null metadata values leave the
 * corresponding text widgets untouched (i.e. at their default empty content).
 */
public void getData() {
  String nrClones = input.getNrClones();
  if ( nrClones != null ) {
    wnrClone.setText( nrClones );
  }
  waddCloneFlag.setSelection( input.isAddCloneFlag() );
  String cloneFlagField = input.getCloneFlagField();
  if ( cloneFlagField != null ) {
    wcloneFlagField.setText( cloneFlagField );
  }
  wisNrCloneInField.setSelection( input.isNrCloneInField() );
  String nrCloneField = input.getNrCloneField();
  if ( nrCloneField != null ) {
    wNrCloneField.setText( nrCloneField );
  }
  waddCloneNum.setSelection( input.isAddCloneNum() );
  String cloneNumField = input.getCloneNumField();
  if ( cloneNumField != null ) {
    wCloneNumField.setText( cloneNumField );
  }
  // Put the cursor in the step-name box, ready for editing.
  wStepname.selectAll();
  wStepname.setFocus();
}
/** Discards any edits: restores the original changed-flag and closes the dialog. */
private void cancel() {
  stepname = null; // null signals to the caller that the dialog was cancelled
  input.setChanged( changed );
  dispose();
}
/** Validates the step name, copies the widget values back into the step metadata, and closes. */
private void ok() {
  if ( Utils.isEmpty( wStepname.getText() ) ) {
    // a step must always have a name; ignore the OK press otherwise
    return;
  }
  stepname = wStepname.getText(); // return value
  // number of clones: either a static value or taken from an incoming field
  input.setNrClones( wnrClone.getText() );
  input.setNrCloneInField( wisNrCloneInField.getSelection() );
  input.setNrCloneField( wNrCloneField.getText() );
  // optional output fields
  input.setAddCloneFlag( waddCloneFlag.getSelection() );
  input.setCloneFlagField( wcloneFlagField.getText() );
  input.setAddCloneNum( waddCloneNum.getSelection() );
  input.setCloneNumField( wCloneNumField.getText() );
  dispose();
}
}
| |
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.http;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.node.service.NodeService;
import org.elasticsearch.rest.*;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import static org.elasticsearch.rest.RestStatus.*;
/**
 * HTTP layer entry point: owns the {@link HttpServerTransport}, routes incoming HTTP
 * requests into the {@link RestController}, and serves plugin "_site" pages as static files.
 */
public class HttpServer extends AbstractLifecycleComponent<HttpServer> {

    private final Environment environment;

    private final HttpServerTransport transport;

    private final RestController restController;

    private final NodeService nodeService;

    // when true, "/_plugin/..." site requests are rejected with 403 FORBIDDEN
    private final boolean disableSites;

    private final PluginSiteFilter pluginSiteFilter = new PluginSiteFilter();

    @Inject
    public HttpServer(Settings settings, Environment environment, HttpServerTransport transport,
                      RestController restController,
                      NodeService nodeService) {
        super(settings);
        this.environment = environment;
        this.transport = transport;
        this.restController = restController;
        this.nodeService = nodeService;

        // let the node service expose this server's info/stats
        nodeService.setHttpServer(this);

        this.disableSites = componentSettings.getAsBoolean("disable_sites", false);

        // route every request received by the transport through this server
        transport.httpServerAdapter(new Dispatcher(this));
    }

    /** Adapter forwarding transport-level requests to {@link HttpServer#internalDispatchRequest}. */
    static class Dispatcher implements HttpServerAdapter {

        private final HttpServer server;

        Dispatcher(HttpServer server) {
            this.server = server;
        }

        @Override
        public void dispatchRequest(HttpRequest request, HttpChannel channel) {
            server.internalDispatchRequest(request, channel);
        }
    }

    @Override
    protected void doStart() throws ElasticSearchException {
        transport.start();
        if (logger.isInfoEnabled()) {
            logger.info("{}", transport.boundAddress());
        }
        // publish the HTTP address as a node attribute so other nodes/APIs can discover it
        nodeService.putAttribute("http_address", transport.boundAddress().publishAddress().toString());
    }

    @Override
    protected void doStop() throws ElasticSearchException {
        nodeService.removeAttribute("http_address");
        transport.stop();
    }

    @Override
    protected void doClose() throws ElasticSearchException {
        transport.close();
    }

    public HttpInfo info() {
        return transport.info();
    }

    public HttpStats stats() {
        return transport.stats();
    }

    public void internalDispatchRequest(final HttpRequest request, final HttpChannel channel) {
        // plugin site pages go through the filter chain to the static file handler
        if (request.rawPath().startsWith("/_plugin/")) {
            RestFilterChain filterChain = restController.filterChain(pluginSiteFilter);
            filterChain.continueProcessing(request, channel);
            return;
        }
        // everything else is a regular REST API call
        restController.dispatchRequest(request, channel);
    }

    /** Terminal filter that serves "/_plugin/..." paths as static site content. */
    class PluginSiteFilter extends RestFilter {

        @Override
        public void process(RestRequest request, RestChannel channel, RestFilterChain filterChain) {
            handlePluginSite((HttpRequest) request, (HttpChannel) channel);
        }
    }

    /**
     * Serves a file from a plugin's "_site" directory for a "/_plugin/{name}/{path}" request.
     * Only GET (and OPTIONS preflight) is allowed; responses are 403/404 for anything suspicious.
     */
    void handlePluginSite(HttpRequest request, HttpChannel channel) {
        if (disableSites) {
            channel.sendResponse(new StringRestResponse(FORBIDDEN));
            return;
        }
        if (request.method() == RestRequest.Method.OPTIONS) {
            // when we have OPTIONS request, simply send OK by default (with the Access Control Origin header which gets automatically added)
            StringRestResponse response = new StringRestResponse(OK);
            channel.sendResponse(response);
            return;
        }
        if (request.method() != RestRequest.Method.GET) {
            channel.sendResponse(new StringRestResponse(FORBIDDEN));
            return;
        }
        // TODO for a "/_plugin" endpoint, we should have a page that lists all the plugins?

        String path = request.rawPath().substring("/_plugin/".length());
        int i1 = path.indexOf('/');
        String pluginName;
        String sitePath;
        if (i1 == -1) {
            pluginName = path;
            sitePath = null;
            // TODO This is a path in the form of "/_plugin/head", without a trailing "/", which messes up
            // resources fetching if it does not exists, a better solution would be to send a redirect
            channel.sendResponse(new StringRestResponse(NOT_FOUND));
            return;
        } else {
            pluginName = path.substring(0, i1);
            sitePath = path.substring(i1 + 1);
        }

        if (sitePath.length() == 0) {
            sitePath = "/index.html";
        }

        // Convert file separators.
        sitePath = sitePath.replace('/', File.separatorChar);
        // this is a plugin provided site, serve it as static files from the plugin location
        File siteFile = new File(new File(environment.pluginsFile(), pluginName), "_site");
        File file = new File(siteFile, sitePath);
        if (!file.exists() || file.isHidden()) {
            channel.sendResponse(new StringRestResponse(NOT_FOUND));
            return;
        }
        if (!file.isFile()) {
            channel.sendResponse(new StringRestResponse(FORBIDDEN));
            return;
        }
        // path-traversal guard: the resolved file must stay below the plugin's _site directory
        if (!file.getAbsolutePath().startsWith(siteFile.getAbsolutePath())) {
            channel.sendResponse(new StringRestResponse(FORBIDDEN));
            return;
        }
        try {
            byte[] data = Streams.copyToByteArray(file);
            channel.sendResponse(new BytesRestResponse(data, guessMimeType(sitePath)));
        } catch (IOException e) {
            channel.sendResponse(new StringRestResponse(INTERNAL_SERVER_ERROR));
        }
    }

    // TODO: Don't respond with a mime type that violates the request's Accept header
    /** Maps a file extension (case-insensitive) to a MIME type, or "" when unknown. */
    private String guessMimeType(String path) {
        int lastDot = path.lastIndexOf('.');
        if (lastDot == -1) {
            return "";
        }
        String extension = path.substring(lastDot + 1).toLowerCase();
        String mimeType = DEFAULT_MIME_TYPES.get(extension);
        if (mimeType == null) {
            return "";
        }
        return mimeType;
    }

    static {
        // This is not an exhaustive list, just the most common types. Call registerMimeType() to add more.
        Map<String, String> mimeTypes = new HashMap<String, String>();
        mimeTypes.put("txt", "text/plain");
        mimeTypes.put("css", "text/css");
        mimeTypes.put("csv", "text/csv");
        mimeTypes.put("htm", "text/html");
        mimeTypes.put("html", "text/html");
        mimeTypes.put("xml", "text/xml");
        mimeTypes.put("js", "text/javascript"); // Technically it should be application/javascript (RFC 4329), but IE8 struggles with that
        mimeTypes.put("xhtml", "application/xhtml+xml");
        mimeTypes.put("json", "application/json");
        mimeTypes.put("pdf", "application/pdf");
        mimeTypes.put("zip", "application/zip");
        mimeTypes.put("tar", "application/x-tar");
        mimeTypes.put("gif", "image/gif");
        mimeTypes.put("jpeg", "image/jpeg");
        mimeTypes.put("jpg", "image/jpeg");
        mimeTypes.put("tiff", "image/tiff");
        mimeTypes.put("tif", "image/tiff");
        mimeTypes.put("png", "image/png");
        mimeTypes.put("svg", "image/svg+xml");
        mimeTypes.put("ico", "image/vnd.microsoft.icon");
        mimeTypes.put("mp3", "audio/mpeg");
        DEFAULT_MIME_TYPES = ImmutableMap.copyOf(mimeTypes);
    }

    // immutable extension->MIME map built by the static initializer above
    public static final Map<String, String> DEFAULT_MIME_TYPES;
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.core.database;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.row.ValueMetaInterface;
/**
 * Contains SQLite specific information through static final members
 *
 * @author Matt
 * @since 11-mrt-2005
 */
public class SQLiteDatabaseMeta extends BaseDatabaseMeta implements DatabaseInterface {
  @Override
  public int[] getAccessTypeList() {
    return new int[] {
      DatabaseMeta.TYPE_ACCESS_NATIVE, DatabaseMeta.TYPE_ACCESS_ODBC, DatabaseMeta.TYPE_ACCESS_JNDI };
  }

  /**
   * @see org.pentaho.di.core.database.DatabaseInterface#getNotFoundTK(boolean)
   */
  @Override
  public int getNotFoundTK( boolean use_autoinc ) {
    // with an auto-increment technical key, 1 marks "not found"; otherwise defer to the base class
    if ( supportsAutoInc() && use_autoinc ) {
      return 1;
    }
    return super.getNotFoundTK( use_autoinc );
  }

  /** @return the JDBC driver class for the selected access type */
  @Override
  public String getDriverClass() {
    if ( getAccessType() == DatabaseMeta.TYPE_ACCESS_NATIVE ) {
      return "org.sqlite.JDBC";
    } else {
      return "sun.jdbc.odbc.JdbcOdbcDriver"; // always ODBC!
    }
  }

  // Note: SQLite is file based, so hostname and port are intentionally ignored here.
  @Override
  public String getURL( String hostname, String port, String databaseName ) {
    if ( getAccessType() == DatabaseMeta.TYPE_ACCESS_NATIVE ) {
      return "jdbc:sqlite:" + databaseName;
    } else {
      return "jdbc:odbc:" + databaseName;
    }
  }

  /**
   * Checks whether or not the command setFetchSize() is supported by the JDBC driver...
   *
   * @return true is setFetchSize() is supported!
   */
  @Override
  public boolean isFetchSizeSupported() {
    return false;
  }

  /**
   * @see org.pentaho.di.core.database.DatabaseInterface#getSchemaTableCombination(java.lang.String, java.lang.String)
   */
  @Override
  @SuppressWarnings( "deprecation" )
  public String getSchemaTableCombination( String schema_name, String table_part ) {
    return getBackwardsCompatibleSchemaTableCombination( schema_name, table_part );
  }

  /**
   * @return true if the database supports bitmap indexes
   */
  @Override
  public boolean supportsBitmapIndex() {
    return false;
  }

  /**
   * @param tableName
   *          The table to be truncated.
   * @return The SQL statement to truncate a table: remove all rows from it without a transaction
   */
  @Override
  public String getTruncateTableStatement( String tableName ) {
    // SQLite has no TRUNCATE statement; DELETE without a WHERE clause is the equivalent
    return "DELETE FROM " + tableName;
  }

  /**
   * Generates the SQL statement to add a column to the specified table For this generic type, i set it to the most
   * common possibility.
   *
   * @param tablename
   *          The table to add
   * @param v
   *          The column defined as a value
   * @param tk
   *          the name of the technical key field
   * @param use_autoinc
   *          whether or not this field uses auto increment
   * @param pk
   *          the name of the primary key field
   * @param semicolon
   *          whether or not to add a semi-colon behind the statement.
   * @return the SQL statement to add a column to the specified table
   */
  @Override
  public String getAddColumnStatement( String tablename, ValueMetaInterface v, String tk, boolean use_autoinc,
    String pk, boolean semicolon ) {
    return "ALTER TABLE " + tablename + " ADD " + getFieldDefinition( v, tk, pk, use_autoinc, true, false );
  }

  /**
   * Generates the SQL statement to modify a column in the specified table
   *
   * NOTE(review): SQLite's ALTER TABLE does not support MODIFY; running this statement against
   * SQLite will fail. Kept as-is for consistency with the other DatabaseMeta implementations —
   * confirm whether callers ever execute it for SQLite.
   *
   * @param tablename
   *          The table to add
   * @param v
   *          The column defined as a value
   * @param tk
   *          the name of the technical key field
   * @param use_autoinc
   *          whether or not this field uses auto increment
   * @param pk
   *          the name of the primary key field
   * @param semicolon
   *          whether or not to add a semi-colon behind the statement.
   * @return the SQL statement to modify a column in the specified table
   */
  @Override
  public String getModifyColumnStatement( String tablename, ValueMetaInterface v, String tk, boolean use_autoinc,
    String pk, boolean semicolon ) {
    return "ALTER TABLE " + tablename + " MODIFY " + getFieldDefinition( v, tk, pk, use_autoinc, true, false );
  }

  /**
   * Builds the SQLite column definition for a single field, optionally prefixed with the
   * field name and suffixed with a carriage return.
   */
  @Override
  public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean use_autoinc,
    boolean add_fieldname, boolean add_cr ) {
    String retval = "";

    String fieldname = v.getName();
    int length = v.getLength();
    int precision = v.getPrecision();

    if ( add_fieldname ) {
      retval += fieldname + " ";
    }

    int type = v.getType();
    switch ( type ) {
      case ValueMetaInterface.TYPE_DATE:
        retval += "DATETIME";
        break; // There is no Date or Timestamp data type in SQLite!!!
      case ValueMetaInterface.TYPE_BOOLEAN:
        retval += "CHAR(1)";
        break;
      case ValueMetaInterface.TYPE_NUMBER:
      case ValueMetaInterface.TYPE_INTEGER:
      case ValueMetaInterface.TYPE_BIGNUMBER:
        if ( fieldname.equalsIgnoreCase( tk ) || // Technical key
          fieldname.equalsIgnoreCase( pk ) // Primary key
        ) {
          // key columns become the rowid alias with auto-increment
          retval += "INTEGER PRIMARY KEY AUTOINCREMENT";
        } else {
          // NUMERIC for decimals or out-of-integer-range lengths, INTEGER otherwise
          if ( precision != 0 || length < 0 || length > 18 ) {
            retval += "NUMERIC";
          } else {
            retval += "INTEGER";
          }
        }
        break;
      case ValueMetaInterface.TYPE_STRING:
        if ( length >= DatabaseMeta.CLOB_LENGTH ) {
          retval += "BLOB";
        } else {
          retval += "TEXT";
        }
        break;
      case ValueMetaInterface.TYPE_BINARY:
        retval += "BLOB";
        break;
      default:
        retval += "UNKNOWN";
        break;
    }

    if ( add_cr ) {
      retval += Const.CR;
    }

    return retval;
  }

  @Override
  public String[] getUsedLibraries() {
    return new String[] { "sqlite-jdbc-3.7.2.jar" };
  }

  /**
   * @return true if the database supports error handling (the default). Returns false for certain databases (SQLite)
   *         that invalidate a prepared statement or even the complete connection when an error occurs.
   */
  @Override
  public boolean supportsErrorHandling() {
    return false;
  }
}
| |
package org.sdw.ingestion.plugin;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.io.OutputFormat;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.Vertex;
import org.apache.flink.util.Collector;
import org.openrdf.model.Literal;
import org.openrdf.model.Resource;
import org.openrdf.model.Statement;
import org.openrdf.model.URI;
import org.openrdf.model.Value;
import org.sdw.ingestion.backend.flink.FlinkBackend;
import org.sdw.ingestion.backend.flink.IngestionBackend;
import org.sdw.ingestion.exception.IngestionException;
import org.sdw.ingestion.graph.Node;
import org.sdw.ingestion.graph.NodeLiteralObject;
import org.sdw.ingestion.graph.NodeObject;
import org.sdw.ingestion.graph.NodeUriObject;
/**
 * Data type for a Flink (Gelly) graph built from RDF statements. Vertices carry {@link Node}
 * values keyed by entity URI; edges carry the predicate URI.
 *
 * @author kay
 */
public class FlinkGraphDataType extends GraphDataType {

    /** graph instance */
    final Graph<String, Node, String> graph;

    /**
     * Wraps an already constructed Flink graph.
     *
     * @param graph the Gelly graph to wrap
     */
    public FlinkGraphDataType(final Graph<String, Node, String> graph) {
        this.graph = graph;
    }

    /**
     * Builds a Flink graph from lists of Sesame/RDF4J statements.
     *
     * @param statementLists statements grouped per entity; the first statement's subject is taken
     *                       as the entity's main URI
     * @param flinkBackend   backend supplying the Flink execution environment
     * @throws IngestionException when the graph cannot be created
     */
    public FlinkGraphDataType(final List<Statement[]> statementLists, final FlinkBackend flinkBackend) throws IngestionException {
        this.graph = this.convertStatementsToGraph(flinkBackend, statementLists);
    }

    /** @return the wrapped Gelly graph */
    public Graph<String, Node, String> getFlinkGraph() {
        return this.graph;
    }

    /**
     * This method can be used to create a graph dataset from a list of Sesame/RDF4J statements.
     *
     * @param backend        ingestion backend; must be a {@link FlinkBackend}
     * @param statementLists statements to convert, one array per entity
     * @return the resulting graph, or null when either argument is null
     * @throws IngestionException when the backend is not a Flink backend
     */
    protected Graph<String, Node, String> convertStatementsToGraph(final IngestionBackend backend, final List<Statement[]> statementLists) throws IngestionException {
        if (null == backend || null == statementLists) {
            return null;
        }

        if (false == backend instanceof FlinkBackend) {
            throw new IngestionException("Only supports Apache Flink backend!");
        }

        FlinkBackend flinkBackend = (FlinkBackend) backend;
        Configuration config = flinkBackend.getConfig().getSourceConfig();

        // set up the execution environment
        ExecutionEnvironment env = flinkBackend.getExecutionEnvironment();
        DataSet<Statement[]> datasetGraphs = env.fromCollection(statementLists);

        DataSet<Edge<String, String>> edges = datasetGraphs.flatMap(new EdgeExtractor(config));
        DataSet<Vertex<String, Node>> initialVertices = datasetGraphs.map(new VertexExtractor(config));

        Graph<String, Node, String> rdfGraph = Graph.fromDataSet(initialVertices, edges, env);
        return rdfGraph;
    }

    /**
     * Class which can be used to obtain vertex/instance/node properties from input statements
     *
     * @author kay
     */
    private static final class VertexExtractor implements MapFunction<Statement[], Vertex<String, Node>> {

        private static final long serialVersionUID = -8875465958660563458L;

        /** configuration which could be used here */
        final Configuration config;

        // reused output instance across map() calls
        private Vertex<String, Node> vertex = new Vertex<>();

        public VertexExtractor(final Configuration config) {
            this.config = config;
        }

        @Override
        public Vertex<String, Node> map(Statement[] statements) throws Exception {
            Node node = new Node();

            String subjectString = null;
            Node currentNode = null;
            for (Statement statement : statements) {
                Resource subject = statement.getSubject();

                // set URI --> requires that the first URI in array is main URI of entity!
                if (null == subjectString) {
                    String uri = subject.stringValue();
                    node.setUri(uri);

                    // do some other book keeping tasks
                    subjectString = subject.stringValue();
                    currentNode = node;
                }

                // track which subject the current statement belongs to
                if (false == subjectString.equals(subject.stringValue())) {
                    subjectString = subject.stringValue();
                    if (node.getUri().equals(subjectString)) {
                        currentNode = node;
                    } else {
                        String uri = subjectString;
                        currentNode = new Node();
                        currentNode.setUri(uri);
                    }
                }

                String predicateString = statement.getPredicate().stringValue();

                NodeObject nodeObject = null;
                Value object = statement.getObject();
                if (object instanceof URI) {
                    nodeObject = new NodeUriObject(object.stringValue());
                } else if (object instanceof Literal) {
                    Literal literalObject = (Literal) object;

                    // a literal carries either a datatype URI or (for plain literals) a language tag
                    String language = null;
                    URI dataType = literalObject.getDatatype();
                    if (null == dataType) {
                        language = literalObject.getLanguage();
                    }

                    nodeObject = new NodeLiteralObject(literalObject.stringValue(),
                            null != dataType ? dataType.stringValue() : language,
                            null != dataType);
                }

                // NOTE(review): every predicate/object pair is attached to the main "node", even after
                // currentNode has switched to a different subject, so currentNode is effectively unused.
                // Presumably this should be currentNode.addPredicateObject(...) — confirm intent before
                // changing, since the returned vertex only carries the main node.
                node.addPredicateObject(predicateString, nodeObject);
            }

            vertex.f0 = node.getUri();
            vertex.f1 = node;

            return vertex;
        }
    }

    /**
     * Class which can be used to extract the edges for the new graph
     *
     * @author kay
     */
    private static final class EdgeExtractor implements FlatMapFunction<Statement[], Edge<String, String>> {

        private static final long serialVersionUID = 1L;

        // reused output instance across flatMap() calls
        private final Edge<String, String> edge = new Edge<>();

        final Configuration config;

        public EdgeExtractor(final Configuration config) {
            this.config = config;
        }

        /// TODO Add filters for URIs! --> only get external entities and not internal properties (e.g. rdf:type
        @Override
        public void flatMap(Statement[] statements, Collector<Edge<String, String>> out) throws Exception {
            for (Statement statement : statements) {
                // only URI objects can be edge targets
                Value object = statement.getObject();
                if (false == (object instanceof URI)) {
                    continue;
                }

                String subjectUri = statement.getSubject().stringValue();
                // BUGFIX: use the object's URI here (was the predicate's URI) — the filters below and
                // edge.f1 both refer to the edge's target entity
                String objectUri = object.stringValue();

                // only pick different entities
                if (objectUri.startsWith(subjectUri)) {
                    continue;
                }

                /// TODO get base URI
                if (false == objectUri.startsWith("http://corp.dbpedia.org/resource/")) {
                    continue;
                }

                edge.f0 = subjectUri;
                edge.f1 = objectUri;
                edge.f2 = statement.getPredicate().stringValue();

                out.collect(edge);
            }
        }
    }

    /**
     * @return number of vertices (entities) in the graph
     * @throws IngestionException when Flink fails to execute the count
     */
    public long getEntityCount() throws IngestionException {
        try {
            return this.graph.numberOfVertices();
        } catch (Exception e) {
            throw new IngestionException("Was not able to count entities", e);
        }
    }

    /**
     * @return number of edges in the graph
     * @throws IngestionException when Flink fails to execute the count
     */
    public long getEdgeCount() throws IngestionException {
        try {
            // BUGFIX: was numberOfVertices(), which returned the vertex count for the edge count too
            return this.graph.numberOfEdges();
        } catch (Exception e) {
            throw new IngestionException("Was not able to count edges", e);
        }
    }

    @Override
    public Iterator<Node> getEntities() throws IngestionException {
        // TODO: not implemented yet — would require collecting graph.getVertices() (bounded by
        // getEntityCount()) and exposing an iterator over the Node values
        return null;
    }

    /**
     * Internal class which can be used to output the data to the output file
     *
     * @author kay
     */
    static class FlinkVerticesOutputFormat implements OutputFormat<Vertex<String, Node>> {

        private static final long serialVersionUID = 4223077947638189019L;

        @Override
        public void configure(Configuration parameters) {
            System.out.println("Got parameters: " + parameters.keySet().size());
        }

        @Override
        public void open(int taskNumber, int numTasks) throws IOException {
            System.out.println("Task number: " + taskNumber + " numTasks: " + numTasks);
        }

        @Override
        public void writeRecord(Vertex<String, Node> record) throws IOException {
            System.out.println(record.f1);
        }

        @Override
        public void close() throws IOException {
            System.out.println("close");
        }
    };
}
| |
/*
* @(#)ResourceCatalog.java 1.6 05/11/17
*
* Copyright (c) 2006 Sun Microsystems, Inc. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* -Redistribution of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* -Redistribution in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of Sun Microsystems, Inc. or the names of contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* This software is provided "AS IS," without a warranty of any kind. ALL
* EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, INCLUDING
* ANY IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE
* OR NON-INFRINGEMENT, ARE HEREBY EXCLUDED. SUN MIDROSYSTEMS, INC. ("SUN")
* AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE
* AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
* DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR ANY LOST
* REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL, CONSEQUENTIAL,
* INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THE THEORY
* OF LIABILITY, ARISING OUT OF THE USE OF OR INABILITY TO USE THIS SOFTWARE,
* EVEN IF SUN HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
*
* You acknowledge that this software is not designed, licensed or intended
* for use in the design, construction, operation or maintenance of any
* nuclear facility.
*/
package jnlp.sample.servlet;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import javax.servlet.ServletContext;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import jnlp.sample.servlet.download.DownloadRequest;
import jnlp.sample.servlet.download.DownloadResponse;
import jnlp.sample.util.ObjectUtil;
import jnlp.sample.util.VersionID;
import jnlp.sample.util.VersionString;
import jnlp.sample.util.log.Logger;
import jnlp.sample.util.log.LoggerFactory;
import org.w3c.dom.DOMException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.SAXParseException;
public class ResourceCatalog {
public static final String VERSION_XML_FILENAME="version.xml";
protected final transient Logger _log;
private final ServletContext _servletContext;
    /** @return the servlet context this catalog resolves WAR resources against */
    public final ServletContext getServletContext ()
    {
        return _servletContext;
    }
/* Class to contain the information we know
* about a specific directory
*/
static public class PathEntries {
/* Version-based entries at this particular path */
private List<JnlpResource> _versionXmlList;
private List<JnlpResource> _directoryList;
private List<JnlpResource> _platformList;
/* Last time this entry was updated */
private long _lastModified; // Last modified time of entry;
public PathEntries (List<JnlpResource> versionXmlList, List<JnlpResource> directoryList, List<JnlpResource> platformList, long lastModified)
{
_versionXmlList = versionXmlList;
_directoryList = directoryList;
_platformList = platformList;
_lastModified = lastModified;
}
public List<JnlpResource> getDirectoryList () { return _directoryList; }
public void setDirectoryList (List<JnlpResource> dirList)
{
_directoryList = dirList;
}
public List<JnlpResource> getVersionXmlList () { return _versionXmlList; }
public void setVersionXmlList (List<JnlpResource> l)
{
_versionXmlList = l;
}
public List<JnlpResource> getPlatformList () { return _platformList; }
public void setPlatformList (List<JnlpResource> l)
{
_platformList = l;
}
public long getLastModified () { return _lastModified; }
public void setLastModified (long m)
{
_lastModified = m;
}
}
    /** Cache of already-parsed directory entries, keyed by directory path (with trailing '/'). */
    private Map<String,PathEntries> _entries;
    // NOTE !!! MAY NOT RETURN NULL
    protected Map<String,PathEntries> getEntriesMap ()
    {
        return _entries;
    }
    protected void setEntriesMap (Map<String,PathEntries> m)
    {
        _entries = m;
    }
    /**
     * Creates a catalog that resolves download requests against the given
     * servlet context, starting with an empty per-directory cache.
     *
     * @param servletContext context used to locate resources inside the WAR
     */
    public ResourceCatalog (ServletContext servletContext)
    {
        _entries = new TreeMap<String,PathEntries>();
        _servletContext = servletContext;
        _log = LoggerFactory.getLogger(ResourceCatalog.class);
    }
    /**
     * Looks up a resource by splitting the request path into directory and
     * name, (re)building the cached {@link PathEntries} for that directory
     * when missing or when its version.xml is newer, then matching against
     * the platform list, the version.xml list and finally the scanned
     * directory list (with one rescan retry - see bug 4450104).
     *
     * @param dreq download request (may be null - treated as empty path)
     * @param reqVersion requested version string
     * @param result one-element array receiving the match on success
     * @return {@code DownloadResponse.STS_00_OK} or the most specific error status
     */
    protected int lookupRecursive (final DownloadRequest dreq, final String reqVersion, final JnlpResource[] result)
    {
        final String path=(null == dreq) ? "" : dreq.getPath();
        // Split request up into path and name
        String name=null, dir=null;
        int idx=path.lastIndexOf('/');
        if (idx < 0)
        {
            name = path;
        }
        else
        {
            name = path.substring(idx + 1); // Exclude '/'
            dir = path.substring(0, idx + 1); // Include '/'
        }
        // Lookup up already parsed entries, and scan directory for entries if necessary
        final Map<String,PathEntries> em=getEntriesMap();
        PathEntries pentries=em.get(dir);
        final JnlpResource xmlVersionResPath=
            new JnlpResource(getServletContext(), dir + VERSION_XML_FILENAME);
        final long xmlResModified=xmlVersionResPath.getLastModified(),
                   pentriesModified=(null == pentries) ? 0L : pentries.getLastModified();
        // rebuild the cache entry when absent or when version.xml changed since it was built
        if ((pentries == null)
         || (xmlVersionResPath.exists() && (xmlResModified > pentriesModified)))
        {
            if (_log.isInformationalLevel())
                _log.info("servlet.log.scandir", dir);
            // Scan XML file
            final List<JnlpResource> dirList=scanDirectory(dir, dreq, reqVersion),
                                     versionList=new ArrayList<JnlpResource>(),
                                     platformList=new ArrayList<JnlpResource>();
            parseVersionXML(versionList, platformList, dir, xmlVersionResPath);
            pentries = new PathEntries(versionList, dirList, platformList, xmlVersionResPath.getLastModified());
            em.put(dir, pentries);
        }
        // platform requests are matched exclusively against the platform list
        if ((dreq != null) && dreq.isPlatformRequest())
            return findMatch(pentries.getPlatformList(), name, reqVersion, dreq, result);
        // First lookup in versions.xml file
        final int sts1=findMatch(pentries.getVersionXmlList(), name, reqVersion, dreq, result);
        if (sts1 != DownloadResponse.STS_00_OK)
        {
            // Then lookup in directory
            int sts2=findMatch(pentries.getDirectoryList(), name, reqVersion, dreq, result);
            if (sts2 != DownloadResponse.STS_00_OK)
            {
                // fix for 4450104
                // try rescan and see if it helps
                final List<JnlpResource> dirList=scanDirectory(dir, dreq, reqVersion);
                pentries.setDirectoryList(dirList);
                // try again after rescanning directory
                if ((sts2=findMatch(pentries.getDirectoryList(), name, reqVersion, dreq, result)) != DownloadResponse.STS_00_OK)
                    return Math.max(sts1, sts2); // Throw the most specific error code
            }
        }
        if (_log.isDebugLevel())
            _log.debug("lookupRecursive(" + path + ")[" + reqVersion + "] => " + result[0]);
        return DownloadResponse.STS_00_OK;
    }
    // use same naming convention as Maven - i.e name-version.ext
    /**
     * Direct lookup: derives the candidate path by inserting
     * {@code -<reqVersion>} before the file extension (Maven-style
     * {@code name-version.ext}) and checks whether that resource exists.
     *
     * @param dreq download request (null rejected)
     * @param reqVersion requested version (null/empty rejected)
     * @param result one-element array receiving the match on success
     * @return {@code DownloadResponse.STS_00_OK} or {@code ERR_10_NO_RESOURCE}
     */
    protected int lookupDirect (final DownloadRequest dreq, final String reqVersion, final JnlpResource[] result)
    {
        if ((null == dreq) || (null == reqVersion) || (reqVersion.length() <= 0))
            return DownloadResponse.ERR_10_NO_RESOURCE;
        final String path=dreq.getPath();
        final int idx=(null == path) ? (-1) : path.lastIndexOf('.');
        if (idx < 0) // if no extension then assume no resource
            return DownloadResponse.ERR_10_NO_RESOURCE;
        final String subPath=path.substring(0, idx), // excluding the '.;
                     name=path.substring(path.lastIndexOf('/') + 1),
                     ext=path.substring(idx), // including the '.'
                     newPath=subPath + "-" + reqVersion + ext;
        final JnlpResource res=
            new JnlpResource(getServletContext(), name, reqVersion, dreq.getOS(), dreq.getArch(), dreq.getLocale(), newPath, reqVersion);
        if (!res.exists())
            return DownloadResponse.ERR_10_NO_RESOURCE;
        if (_log.isDebugLevel())
            _log.debug("lookupDirect(" + path + ")[" + reqVersion + "] => " + res);
        result[0] = res;
        return DownloadResponse.STS_00_OK;
    }
protected int resolveLookupResult (final JnlpResource directLookup,
final int stsDirect,
final JnlpResource rcrsvLookup,
final int stsRcrsv,
final JnlpResource[] result)
{
if (stsRcrsv == DownloadResponse.STS_00_OK)
{
result[0] = rcrsvLookup; // prefer more detailed location
return DownloadResponse.STS_00_OK;
}
else if (stsDirect == DownloadResponse.STS_00_OK)
{
result[0] = directLookup;
return DownloadResponse.STS_00_OK;
}
return Math.max(stsDirect, stsRcrsv);
}
public JnlpResource lookupResource (final DownloadRequest dreq, final String reqVersion)
throws ErrorResponseException
{
final JnlpResource[] result=new JnlpResource[1];
final int stsDirect=lookupDirect(dreq, reqVersion, result);
final JnlpResource directLookup=
(DownloadResponse.STS_00_OK == stsDirect) ? result[0] : null;
if (result[0] != null) // start from scratch
result[0] = null;
final int stsRcrsv=lookupRecursive(dreq, reqVersion, result);
final JnlpResource rcrsvLookup=
(DownloadResponse.STS_00_OK == stsRcrsv) ? result[0] : null;
if (result[0] != null) // start from scratch
result[0] = null;
int sts=
resolveLookupResult(directLookup, stsDirect, rcrsvLookup, stsRcrsv, result);
final JnlpResource res=result[0];
if ((DownloadResponse.STS_00_OK == sts) && (res == null))
sts = DownloadResponse.ERR_10_NO_RESOURCE;
if (sts != DownloadResponse.STS_00_OK)
throw new ErrorResponseException(DownloadResponse.getJnlpErrorResponse(DownloadResponse.ERR_10_NO_RESOURCE));
return res;
}
    /** Convenience overload that uses the version carried by the request itself. */
    public JnlpResource lookupResource (final DownloadRequest dreq) throws ErrorResponseException
    {
        return lookupResource(dreq, (null == dreq) ? null : dreq.getVersion());
    }
    /* This method finds the best match, or return the best error code. The
     * result parameter must be an array with room for one element.
     *
     * If a match is found, the method returns DownloadResponse.STS_00_OK
     * If one or more entries matches on: name, version-id, os, arch, and locale,
     * then the one with the highest version-id is set in the result[0] field.
     *
     * If a match is not found, it returns an error code, either: ERR_10_NO_RESOURCE,
     * ERR_11_NO_VERSION, ERR_20_UNSUP_OS, ERR_21_UNSUP_ARCH, ERR_22_UNSUP_LOCALE,
     * ERR_23_UNSUP_JRE.
     *
     */
    public static int findMatch (final Collection<? extends JnlpResource> list,
                                 final String name,
                                 final String reqVersion,
                                 final DownloadRequest dreq,
                                 final JnlpResource[] result)
    {
        if ((list == null) || (list.size() <= 0) || (null == dreq))
            return DownloadResponse.ERR_10_NO_RESOURCE;
        // Setup return values
        VersionID bestVersionId = null;
        int error=DownloadResponse.ERR_10_NO_RESOURCE;
        final VersionString vs=new VersionString(reqVersion);
        // Iterate through entries
        for (final JnlpResource respath : list)
        {
            if (null == respath)
                continue;
            final VersionID vid=new VersionID(respath.getVersionId());
            final int sts=matchEntry(name, vs, dreq, respath, vid);
            if (sts == DownloadResponse.STS_00_OK)
            {
                // keep the candidate with the highest version-id seen so far
                if ((result[0] == null) || vid.isGreaterThan(bestVersionId))
                {
                    result[0] = respath;
                    bestVersionId = vid;
                }
            }
            else
            {
                // remember the most specific (largest) error across all entries
                error = Math.max(error, sts);
            }
        }
        return (result[0] != null) ? DownloadResponse.STS_00_OK : error;
    }
    /**
     * Matches one resource entry against the request - name, version, OS,
     * architecture and locale, in that order; the first failing check
     * determines the increasingly specific error code that is returned.
     *
     * @return {@code DownloadResponse.STS_00_OK} on a full match, otherwise
     * the error code of the first failing criterion
     */
    public static int matchEntry (String name, VersionString vs, DownloadRequest dreq, JnlpResource jnlpres, VersionID vid)
    {
        if ((null == jnlpres) || (null == vs))
            return DownloadResponse.ERR_10_NO_RESOURCE;
        if (!name.equals(jnlpres.getName()))
            return DownloadResponse.ERR_10_NO_RESOURCE;
        if (!vs.contains(vid))
            return DownloadResponse.ERR_11_NO_VERSION;
        if (!prefixMatchLists(jnlpres.getOSList(), dreq.getOS()))
            return DownloadResponse.ERR_20_UNSUP_OS;
        if (!prefixMatchLists(jnlpres.getArchList(), dreq.getArch()))
            return DownloadResponse.ERR_21_UNSUP_ARCH;
        if (!prefixMatchLists(jnlpres.getLocaleList(), dreq.getLocale()))
            return DownloadResponse.ERR_22_UNSUP_LOCALE;
        return DownloadResponse.STS_00_OK;
    }
private static boolean prefixMatchStringList (String[] prefixList, String target)
{
// No prefixes matches everything
if ((prefixList == null) || (prefixList.length <= 0))
return true;
// No target, but a prefix list does not match anything
if ((target == null) || (target.length() <= 0))
return false;
for (final String p : prefixList)
{
if ((null == p) || (p.length() <= 0))
continue;
if (target.startsWith(p))
return true;
}
return false;
}
/* Return true if at least one of the strings in 'prefixes' are a prefix
* to at least one of the 'keys'.
*/
public static boolean prefixMatchLists (String[] prefixes, String[] keys)
{
// The prefixes are part of the server resources. If none is given,
// everything matches
if ((prefixes == null) || (prefixes.length <= 0))
return true;
// If no os keyes was given, and the server resource is keyed of this,
// then return false.
if ((keys == null) || (keys.length <= 0))
return false;
// Check for a match on a key
for(final String k : keys)
{
if (prefixMatchStringList(prefixes, k))
return true;
}
return false;
}
private static String appendToFilename (String org, String prefix, String ... comps)
{
if ((null == prefix) || (prefix.length() <= 0)
|| (null == comps) || (comps.length <= 0))
return org;
String filename=org;
for (final String c : comps)
{
if ((null == c) || (c.length() <= 0))
continue;
filename += prefix + c;
}
return filename;
}
    // Option characters and path prefixes used to encode version/os/arch/locale
    // information into file names, e.g. name__V1.0__Owindows.jar (see the
    // grammar documented above jnlpGetPath).
    public static final char VERSION_SEP_CHAR='V',
                             OS_SEP_CHAR='O',
                             ARCH_SEP_CHAR='A',
                             LOCALE_SEP_CHAR='L';
    public static final String PATH_PREFIX_SEP="__",
                               VERSION_PATH_PREFIX=PATH_PREFIX_SEP + String.valueOf(VERSION_SEP_CHAR),
                               OS_PATH_PREFIX=PATH_PREFIX_SEP + String.valueOf(OS_SEP_CHAR),
                               ARCH_PATH_PREFIX=PATH_PREFIX_SEP + String.valueOf(ARCH_SEP_CHAR),
                               LOCALE_PATH_PREFIX=PATH_PREFIX_SEP + String.valueOf(LOCALE_SEP_CHAR);
    /* This method scans the directory pointed to by the
     * given path and creates a list of ResourcePath elements
     * that contains information about all the entries
     *
     * The version-based information is encoded in the file name
     * given the following format:
     *
     *  entry ::= <name> __ ( <options> ). <ext>
     *  options ::= <option> ( __ <options> )?
     *  option ::= V<version-id>
     *         | O<os>
     *         | A<arch>
     *         | L<locale>
     *
     */
    /**
     * Builds the encoded file path for a request (fix for 4474021): splits the
     * request path into directory/name/extension, then appends the version,
     * OS, arch and locale options using the encoding described above.
     */
    public static String jnlpGetPath (final DownloadRequest dreq, final String reqVersion)
    {
        // fix for 4474021
        // try to manually generate the filename
        // extract file name
        String path=(null == dreq) ? "" : dreq.getPath(),
               filename=path.substring(path.lastIndexOf('/') + 1),
               ext=null;
        path = path.substring(0, path.lastIndexOf('/') + 1);
        int idx=filename.lastIndexOf('.');
        if (idx >= 0)
        {
            ext = filename.substring(idx + 1);
            filename = filename.substring(0, idx);
        }
        filename = appendToFilename(filename, VERSION_PATH_PREFIX, reqVersion);
        filename = appendToFilename(filename, OS_PATH_PREFIX, (null == dreq) ? null : dreq.getOS());
        filename = appendToFilename(filename, ARCH_PATH_PREFIX, (null == dreq) ? null : dreq.getArch());
        filename = appendToFilename(filename, LOCALE_PATH_PREFIX, (null == dreq) ? null : dreq.getLocale());
        if ((ext != null) && (ext.length() > 0))
            filename += "." + ext;
        return path + filename;
    }
public List<JnlpResource> scanDirectory (String dirPath, DownloadRequest dreq, String reqVersion)
{
final ServletContext ctx=getServletContext();
final String effPath=ctx.getRealPath(dirPath);
// fix for 4474021
if ((effPath == null) || (effPath.length() <= 0))
{
final String path=jnlpGetPath(dreq, reqVersion),
reqPath=dreq.getPath(),
name=reqPath.substring(path.lastIndexOf('/') + 1);
final JnlpResource jnlpres=new JnlpResource(ctx, name, reqVersion, dreq.getOS(), dreq.getArch(), dreq.getLocale(), path, reqVersion);
// the file does not exist
if (!jnlpres.exists())
return null;
// we create a bigger modifiable list since it may be manipulated further down the code
List<JnlpResource> list=new ArrayList<JnlpResource>();
list.add(jnlpres);
return list;
}
final File dir = new File(effPath);
if (_log.isDebugLevel())
_log.debug("scanDirectory(" + dir + ") => " + effPath);
if (dir.exists() && dir.isDirectory())
{
final File[] entries=dir.listFiles();
final int numEntries=(null == entries) ? 0 : entries.length;
final List<JnlpResource> list=new ArrayList<JnlpResource>(numEntries);
if (numEntries > 0)
{
for (final File f : entries)
{
if (null == f)
continue;
final JnlpResource jnlpres=parseFileEntry(dirPath, f.getName());
if (null == jnlpres)
continue;
if (_log.isDebugLevel())
_log.debug("scanDirectory(" + dir + ") read file resource: " + jnlpres);
list.add(jnlpres);
}
}
}
return null;
}
protected JnlpResource parseFileEntry (final String dir, final String filename)
{
if ((null == filename) || (filename.length() <= 0))
return null;
int idx=filename.indexOf(PATH_PREFIX_SEP);
if (idx < 0)
return null;
// Cut out name and extension
final String name=filename.substring(0, idx), extension;
String rest=filename.substring(idx);
if ((idx=rest.lastIndexOf('.')) >= 0)
{
extension = rest.substring(idx);
rest = rest.substring(0, idx);
}
else
extension = "";
// Parse options
final Collection<String> osList=new LinkedList<String>(),
archList=new LinkedList<String>(),
localeList=new LinkedList<String>();
String versionId=null;
while ((rest != null) && (rest.length() > 0))
{
/* Must start with __ at this point */
if (!rest.startsWith(PATH_PREFIX_SEP))
return null;
rest = rest.substring(PATH_PREFIX_SEP.length());
// Get option and argument
final char option=Character.toUpperCase(rest.charAt(0)); // be lenient
final String arg;
if ((idx= rest.indexOf(PATH_PREFIX_SEP)) >= 0)
{
arg = rest.substring(1);
rest = "";
}
else
{
arg = rest.substring(1, idx);
rest = rest.substring(idx);
}
switch(option)
{
case VERSION_SEP_CHAR :
versionId = arg;
break;
case OS_SEP_CHAR :
osList.add(arg);
break;
case ARCH_SEP_CHAR :
archList.add(arg);
break;
case LOCALE_SEP_CHAR :
localeList.add(arg);
break;
default :
return null; // error
}
}
return new JnlpResource(getServletContext(),
name + extension, /* Resource name in URL request */
versionId,
listToStrings(osList),
listToStrings(archList),
listToStrings(localeList),
dir + filename, /* Resource name in WAR file */
versionId);
}
private static String[] listToStrings (Collection<String> list)
{
if ((null == list) || (list.size() <= 0))
return null;
return list.toArray(new String[list.size()]);
}
    // Element names (and their "<name>" search patterns) used when parsing
    // version.xml files in parseVersionXML below.
    public static final String JNLP_VERSIONS_ELEM_NAME="jnlp-versions",
                               RESOURCE_ELEM_NAME="resource",
                                   RESOURCE_ELEM_PATTERN="<" + RESOURCE_ELEM_NAME + ">",
                               PATTERN_ELEM_NAME="pattern",
                                   PATTERN_ELEM_PATTERN="<" + PATTERN_ELEM_NAME + ">",
                               NAME_ELEM_NAME="name",
                                   NAME_ELEM_PATTERN="<" + NAME_ELEM_NAME + ">",
                               VERSION_ID_ELEM_NAME="version-id",
                                   VERSION_ID_ELEM_PATTERN="<" + VERSION_ID_ELEM_NAME + ">",
                               OS_ELEM_NAME="os",
                                   OS_ELEM_PATTERN="<" + OS_ELEM_NAME + ">",
                               ARCH_ELEM_NAME="arch",
                                   ARCH_ELEM_PATTERN="<" + ARCH_ELEM_NAME + ">",
                               LOCALE_ELEM_NAME="locale",
                                   LOCALE_ELEM_PATTERN="<" + LOCALE_ELEM_NAME + ">",
                               FILE_ELEM_NAME="file",
                                   FILE_ELEM_PATTERN="<" + FILE_ELEM_NAME + ">",
                               PLATFORM_ELEM_NAME="platform",
                                   PLATFORM_ELEM_PATTERN="<" + PLATFORM_ELEM_NAME + ">",
                               PRODUCT_VERSION_ELEM_NAME="product-version-id",
                                   PRODUCT_VERSION_ELEM_PATTERN="<" + PRODUCT_VERSION_ELEM_NAME + ">";
    /**
     * Parses a directory's {@code version.xml} and fills the given lists:
     * each {@code <resource>} element becomes an entry in {@code versionList},
     * each {@code <platform>} element one in {@code platformList}. Entries
     * with missing mandatory sub-elements, or whose target file does not
     * exist, are logged and skipped. Parse failures only log a warning -
     * the lists are then left as passed in.
     *
     * @param versionList receives resources from {@code <resource>} elements
     * @param platformList receives resources from {@code <platform>} elements
     * @param dir directory prefix prepended to the {@code <file>} values
     * @param versionRes the version.xml resource (ignored when null/absent)
     */
    protected void parseVersionXML (final Collection<JnlpResource> versionList,
                                    final Collection<JnlpResource> platformList,
                                    final String dir, final JnlpResource versionRes)
    {
        if ((null == versionRes) || (!versionRes.exists()))
            return;
        // Parse XML into a more understandable format
        XMLNode root=null;
        try
        {
            final DocumentBuilderFactory docBuilderFactory=DocumentBuilderFactory.newInstance();
            final DocumentBuilder docBuilder=docBuilderFactory.newDocumentBuilder();
            final URL versionURL=versionRes.getResource();
            InputStream inVersion=null;
            final Document doc;
            try
            {
                inVersion = new BufferedInputStream(ObjectUtil.openResource(versionURL));
                if (null == (doc=docBuilder.parse(inVersion)))
                    throw new DOMException(DOMException.SYNTAX_ERR, "No document parsed");
            }
            finally
            {
                // close the stream even when parsing throws
                if (inVersion != null)
                {
                    try
                    {
                        inVersion.close();
                    }
                    catch(IOException e)
                    {
                        // ignored - nothing useful can be done about a failed close
                    }
                }
            }
            final Element docElem=doc.getDocumentElement();
            docElem.normalize();
            // Convert document into an XMLNode structure, since we already got utility methods
            // to handle these. We should really use the data-binding stuff here - but that will come
            // later
            //
            if (null == (root=XMLParsing.convert(docElem)))
                throw new DOMException(DOMException.NAMESPACE_ERR, "No root found");
        }
        catch (SAXParseException err)
        {
            _log.warn("servlet.log.warning.xml.parsing",
                      versionRes.getPath(),
                      Integer.toString(err.getLineNumber()),
                      err.getMessage());
            return;
        }
        catch (Throwable t)
        {
            _log.warn("servlet.log.warning.xml.reading", t, versionRes.getPath());
            return;
        }
        // Check that root element is a <jnlp> tag
        final String rootName=root.getName();
        if (!JNLP_VERSIONS_ELEM_NAME.equalsIgnoreCase(rootName))
        {
            _log.warn("servlet.log.warning.xml.missing-jnlp", versionRes.getPath());
            return;
        }
        // Visit all <resource> elements
        XMLParsing.visitElements(root, RESOURCE_ELEM_PATTERN, new XMLParsing.ElementVisitor() {
                /*
                 * @see jnlp.sample.servlet.XMLParsing.ElementVisitor#visitElement(jnlp.sample.servlet.XMLNode)
                 */
                @Override
                public void visitElement(XMLNode node)
                {
                    XMLNode pattern = XMLParsing.findElementPath(node, PATTERN_ELEM_PATTERN);
                    if (pattern == null)
                    {
                        _log.warn("servlet.log.warning.xml.missing-pattern", versionRes.getPath());
                        return;
                    }
                    // Parse pattern
                    final String name=XMLParsing.getElementContent(pattern , NAME_ELEM_PATTERN, "");
                    final String versionId=XMLParsing.getElementContent(pattern , VERSION_ID_ELEM_PATTERN);
                    final String[] os=XMLParsing.getMultiElementContent(pattern, OS_ELEM_PATTERN);
                    final String[] arch=XMLParsing.getMultiElementContent(pattern, ARCH_ELEM_PATTERN);
                    final String[] locale=XMLParsing.getMultiElementContent(pattern, LOCALE_ELEM_PATTERN);
                    // Get return request
                    final String file=XMLParsing.getElementContent(node, FILE_ELEM_PATTERN);
                    if ((versionId == null) || (versionId.length() <= 0)
                     || (file == null) || (file.length() <= 0))
                    {
                        _log.warn("servlet.log.warning.xml.missing-elems", versionRes.getPath());
                        return;
                    }
                    final JnlpResource res=new JnlpResource(getServletContext(),
                                                            name,
                                                            versionId,
                                                            os,
                                                            arch,
                                                            locale,
                                                            dir + file,
                                                            versionId);
                    if (res.exists())
                    {
                        if (_log.isDebugLevel())
                            _log.debug("Read resource: " + res);
                        versionList.add(res);
                    }
                    else
                        _log.warn("servlet.log.warning.missing-file", file, versionRes.getPath());
                }
            });
        // Visit all <platform> elements
        XMLParsing.visitElements(root, PLATFORM_ELEM_PATTERN, new XMLParsing.ElementVisitor() {
                /*
                 * @see jnlp.sample.servlet.XMLParsing.ElementVisitor#visitElement(jnlp.sample.servlet.XMLNode)
                 */
                @Override
                public void visitElement (XMLNode node)
                {
                    XMLNode pattern=XMLParsing.findElementPath(node, PATTERN_ELEM_PATTERN);
                    if (pattern == null)
                    {
                        _log.warn("servlet.log.warning.xml.missing-pattern", versionRes.getPath());
                        return;
                    }
                    // Parse pattern
                    final String name=XMLParsing.getElementContent(pattern , NAME_ELEM_PATTERN, "");
                    final String versionId=XMLParsing.getElementContent(pattern , VERSION_ID_ELEM_PATTERN);
                    final String[] os=XMLParsing.getMultiElementContent(pattern, OS_ELEM_PATTERN);
                    final String[] arch=XMLParsing.getMultiElementContent(pattern, ARCH_ELEM_PATTERN);
                    final String[] locale=XMLParsing.getMultiElementContent(pattern, LOCALE_ELEM_PATTERN);
                    // Get return request
                    final String file=XMLParsing.getElementContent(node, FILE_ELEM_PATTERN);
                    final String productId=XMLParsing.getElementContent(node, PRODUCT_VERSION_ELEM_PATTERN);
                    // platform entries additionally require a product-version-id
                    if ((versionId == null) || (versionId.length() <= 0)
                     || (file == null) || (file.length() <= 0)
                     || (productId == null) || (productId.length() <= 0))
                    {
                        _log.warn("servlet.log.warning.xml.missing-elems2", versionRes.getPath());
                        return;
                    }
                    final JnlpResource res=new JnlpResource(getServletContext(),
                                                            name,
                                                            versionId,
                                                            os,
                                                            arch,
                                                            locale,
                                                            dir + file,
                                                            productId);
                    if (res.exists())
                    {
                        if (_log.isDebugLevel())
                            _log.debug("Read platform resource: " + res);
                        platformList.add(res);
                    }
                    else
                        _log.warn("servlet.log.warning.missing-file", file, versionRes.getPath());
                }
            });
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.standard;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Pattern;
import javax.activation.DataHandler;
import javax.mail.Authenticator;
import javax.mail.Message;
import javax.mail.Message.RecipientType;
import javax.mail.MessagingException;
import javax.mail.PasswordAuthentication;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.internet.AddressException;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeBodyPart;
import javax.mail.internet.MimeMessage;
import javax.mail.internet.MimeMultipart;
import javax.mail.internet.MimeUtility;
import javax.mail.internet.PreencodedMimeBodyPart;
import javax.mail.util.ByteArrayDataSource;
import org.apache.commons.codec.binary.Base64;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.annotation.behavior.SupportsBatching;
import org.apache.nifi.annotation.behavior.SystemResource;
import org.apache.nifi.annotation.behavior.SystemResourceConsideration;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.stream.io.StreamUtils;
@SupportsBatching
@Tags({"email", "put", "notify", "smtp"})
@InputRequirement(Requirement.INPUT_REQUIRED)
@CapabilityDescription("Sends an e-mail to configured recipients for each incoming FlowFile")
@SystemResourceConsideration(resource = SystemResource.MEMORY, description = "The entirety of the FlowFile's content (as a String object) "
+ "will be read into memory in case the property to use the flow file content as the email body is set to true.")
public class PutEmail extends AbstractProcessor {
public static final PropertyDescriptor SMTP_HOSTNAME = new PropertyDescriptor.Builder()
.name("SMTP Hostname")
.description("The hostname of the SMTP host")
.required(true)
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
public static final PropertyDescriptor SMTP_PORT = new PropertyDescriptor.Builder()
.name("SMTP Port")
.description("The Port used for SMTP communications")
.required(true)
.defaultValue("25")
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.addValidator(StandardValidators.PORT_VALIDATOR)
.build();
public static final PropertyDescriptor SMTP_USERNAME = new PropertyDescriptor.Builder()
.name("SMTP Username")
.description("Username for the SMTP account")
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.required(false)
.build();
public static final PropertyDescriptor SMTP_PASSWORD = new PropertyDescriptor.Builder()
.name("SMTP Password")
.description("Password for the SMTP account")
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.required(false)
.sensitive(true)
.build();
public static final PropertyDescriptor SMTP_AUTH = new PropertyDescriptor.Builder()
.name("SMTP Auth")
.description("Flag indicating whether authentication should be used")
.required(true)
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.addValidator(StandardValidators.BOOLEAN_VALIDATOR)
.defaultValue("true")
.build();
    // ---- SMTP transport configuration ----

    /** Whether STARTTLS should be enabled on the SMTP connection ("true"/"false"). */
    public static final PropertyDescriptor SMTP_TLS = new PropertyDescriptor.Builder()
            .name("SMTP TLS")
            .description("Flag indicating whether TLS should be enabled")
            .required(true)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.BOOLEAN_VALIDATOR)
            .defaultValue("false")
            .build();
    /** Fully-qualified socket factory class used to open the SMTP connection. */
    public static final PropertyDescriptor SMTP_SOCKET_FACTORY = new PropertyDescriptor.Builder()
            .name("SMTP Socket Factory")
            .description("Socket Factory to use for SMTP Connection")
            .required(true)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .defaultValue("javax.net.ssl.SSLSocketFactory")
            .build();
    /** Value placed in the X-Mailer header of every outgoing message. */
    public static final PropertyDescriptor HEADER_XMAILER = new PropertyDescriptor.Builder()
            .name("SMTP X-Mailer Header")
            .description("X-Mailer used in the header of the outgoing email")
            .required(true)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .defaultValue("NiFi")
            .build();
    /** Optional regex selecting which FlowFile attributes become email headers. */
    public static final PropertyDescriptor ATTRIBUTE_NAME_REGEX = new PropertyDescriptor.Builder()
            .name("attribute-name-regex")
            .displayName("Attributes to Send as Headers (Regex)")
            .description("A Regular Expression that is matched against all FlowFile attribute names. "
                    + "Any attribute whose name matches the regex will be added to the Email messages as a Header. "
                    + "If not specified, no FlowFile attributes will be added as headers.")
            .addValidator(StandardValidators.REGULAR_EXPRESSION_VALIDATOR)
            .required(false)
            .build();

    // ---- Message content configuration ----

    /** MIME type of the email body (e.g. text/plain, text/html). */
    public static final PropertyDescriptor CONTENT_TYPE = new PropertyDescriptor.Builder()
            .name("Content Type")
            .description("Mime Type used to interpret the contents of the email, such as text/plain or text/html")
            .required(true)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .defaultValue("text/plain")
            .build();
    /** Sender address(es); RFC822 comma-separated list. */
    public static final PropertyDescriptor FROM = new PropertyDescriptor.Builder()
            .name("From")
            .description("Specifies the Email address to use as the sender. "
                    + "Comma separated sequence of addresses following RFC822 syntax.")
            .required(true)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();
    // TO/CC/BCC are individually optional; customValidate() enforces that at least one is set.
    public static final PropertyDescriptor TO = new PropertyDescriptor.Builder()
            .name("To")
            .description("The recipients to include in the To-Line of the email. "
                    + "Comma separated sequence of addresses following RFC822 syntax.")
            .required(false)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();
    public static final PropertyDescriptor CC = new PropertyDescriptor.Builder()
            .name("CC")
            .description("The recipients to include in the CC-Line of the email. "
                    + "Comma separated sequence of addresses following RFC822 syntax.")
            .required(false)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();
    public static final PropertyDescriptor BCC = new PropertyDescriptor.Builder()
            .name("BCC")
            .description("The recipients to include in the BCC-Line of the email. "
                    + "Comma separated sequence of addresses following RFC822 syntax.")
            .required(false)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();
    /** Subject line; supports expression language against FlowFile attributes. */
    public static final PropertyDescriptor SUBJECT = new PropertyDescriptor.Builder()
            .name("Subject")
            .description("The email subject")
            .required(true)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .defaultValue("Message from NiFi")
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();
    /** Static body text; ignored when CONTENT_AS_MESSAGE is true. */
    public static final PropertyDescriptor MESSAGE = new PropertyDescriptor.Builder()
            .name("Message")
            .description("The body of the email message")
            .required(false)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();
    /** When true, the FlowFile content is attached to the email as a file. */
    public static final PropertyDescriptor ATTACH_FILE = new PropertyDescriptor.Builder()
            .name("Attach File")
            .description("Specifies whether or not the FlowFile content should be attached to the email")
            .required(true)
            .allowableValues("true", "false")
            .defaultValue("false")
            .build();
    /** When true, the FlowFile content becomes the email body and MESSAGE is ignored. */
    public static final PropertyDescriptor CONTENT_AS_MESSAGE = new PropertyDescriptor.Builder()
            .name("email-ff-content-as-message")
            .displayName("Flow file content as message")
            .description("Specifies whether or not the FlowFile content should be the message of the email. If true, the 'Message' property is ignored.")
            .required(true)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.BOOLEAN_VALIDATOR)
            .defaultValue("false")
            .build();
    /** When true, all FlowFile attributes are appended to the email body. */
    public static final PropertyDescriptor INCLUDE_ALL_ATTRIBUTES = new PropertyDescriptor.Builder()
            .name("Include All Attributes In Message")
            .description("Specifies whether or not all FlowFile attributes should be recorded in the body of the email message")
            .required(true)
            .allowableValues("true", "false")
            .defaultValue("false")
            .build();

    // ---- Relationships ----

    public static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .description("FlowFiles that are successfully sent will be routed to this relationship")
            .build();
    public static final Relationship REL_FAILURE = new Relationship.Builder()
            .name("failure")
            .description("FlowFiles that fail to send will be routed to this relationship")
            .build();
    // Immutable views assembled once in init(); returned by the getters below.
    private List<PropertyDescriptor> properties;
    private Set<Relationship> relationships;
    /**
     * Mapping of the mail properties to the NiFi PropertyDescriptors that will be evaluated at runtime
     */
    private static final Map<String, PropertyDescriptor> propertyToContext = new HashMap<>();
    // Wire each javax.mail session key to the descriptor whose (expression-evaluated)
    // value supplies it. Note SMTP_PORT intentionally backs both the plain and the
    // socket-factory port keys.
    static {
        propertyToContext.put("mail.smtp.host", SMTP_HOSTNAME);
        propertyToContext.put("mail.smtp.port", SMTP_PORT);
        propertyToContext.put("mail.smtp.socketFactory.port", SMTP_PORT);
        propertyToContext.put("mail.smtp.socketFactory.class", SMTP_SOCKET_FACTORY);
        propertyToContext.put("mail.smtp.auth", SMTP_AUTH);
        propertyToContext.put("mail.smtp.starttls.enable", SMTP_TLS);
        propertyToContext.put("mail.smtp.user", SMTP_USERNAME);
        propertyToContext.put("mail.smtp.password", SMTP_PASSWORD);
    }
@Override
protected void init(final ProcessorInitializationContext context) {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(SMTP_HOSTNAME);
properties.add(SMTP_PORT);
properties.add(SMTP_USERNAME);
properties.add(SMTP_PASSWORD);
properties.add(SMTP_AUTH);
properties.add(SMTP_TLS);
properties.add(SMTP_SOCKET_FACTORY);
properties.add(HEADER_XMAILER);
properties.add(ATTRIBUTE_NAME_REGEX);
properties.add(CONTENT_TYPE);
properties.add(FROM);
properties.add(TO);
properties.add(CC);
properties.add(BCC);
properties.add(SUBJECT);
properties.add(MESSAGE);
properties.add(CONTENT_AS_MESSAGE);
properties.add(ATTACH_FILE);
properties.add(INCLUDE_ALL_ATTRIBUTES);
this.properties = Collections.unmodifiableList(properties);
final Set<Relationship> relationships = new HashSet<>();
relationships.add(REL_SUCCESS);
relationships.add(REL_FAILURE);
this.relationships = Collections.unmodifiableSet(relationships);
}
    @Override
    public Set<Relationship> getRelationships() {
        // Unmodifiable view built in init(); safe to hand out directly.
        return relationships;
    }
    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        // Unmodifiable view built in init(); safe to hand out directly.
        return properties;
    }
@Override
protected Collection<ValidationResult> customValidate(final ValidationContext context) {
final List<ValidationResult> errors = new ArrayList<>(super.customValidate(context));
final String to = context.getProperty(TO).getValue();
final String cc = context.getProperty(CC).getValue();
final String bcc = context.getProperty(BCC).getValue();
if (to == null && cc == null && bcc == null) {
errors.add(new ValidationResult.Builder().subject("To, CC, BCC").valid(false).explanation("Must specify at least one To/CC/BCC address").build());
}
return errors;
}
private volatile Pattern attributeNamePattern = null;
@OnScheduled
public void onScheduled(final ProcessContext context) {
final String attributeNameRegex = context.getProperty(ATTRIBUTE_NAME_REGEX).getValue();
this.attributeNamePattern = attributeNameRegex == null ? null : Pattern.compile(attributeNameRegex);
}
private void setMessageHeader(final String header, final String value, final Message message) throws MessagingException {
final ComponentLog logger = getLogger();
try {
message.setHeader(header, MimeUtility.encodeText(value));
} catch (UnsupportedEncodingException e){
logger.warn("Unable to add header {} with value {} due to encoding exception", new Object[]{header, value});
}
}
    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) {
        final FlowFile flowFile = session.get();
        if (flowFile == null) {
            return;
        }
        // Build the javax.mail session/message from per-FlowFile property evaluation.
        final Properties properties = this.getMailPropertiesFromFlowFile(context, flowFile);
        final Session mailSession = this.createMailSession(properties);
        final Message message = new MimeMessage(mailSession);
        final ComponentLog logger = getLogger();
        try {
            // Addressing: FROM is required; TO/CC/BCC may each evaluate to empty.
            message.addFrom(toInetAddresses(context, flowFile, FROM));
            message.setRecipients(RecipientType.TO, toInetAddresses(context, flowFile, TO));
            message.setRecipients(RecipientType.CC, toInetAddresses(context, flowFile, CC));
            message.setRecipients(RecipientType.BCC, toInetAddresses(context, flowFile, BCC));
            // Optionally copy matching FlowFile attributes into message headers.
            if (attributeNamePattern != null) {
                for (final Map.Entry<String, String> entry : flowFile.getAttributes().entrySet()) {
                    if (attributeNamePattern.matcher(entry.getKey()).matches()) {
                        this.setMessageHeader(entry.getKey(), entry.getValue(), message);
                    }
                }
            }
            this.setMessageHeader("X-Mailer", context.getProperty(HEADER_XMAILER).evaluateAttributeExpressions(flowFile).getValue(), message);
            message.setSubject(context.getProperty(SUBJECT).evaluateAttributeExpressions(flowFile).getValue());
            String messageText = getMessage(flowFile, context, session);
            String contentType = context.getProperty(CONTENT_TYPE).evaluateAttributeExpressions(flowFile).getValue();
            message.setContent(messageText, contentType);
            message.setSentDate(new Date());
            // When attaching, the simple body set above is replaced by a multipart
            // containing the (base64 pre-encoded) text part plus the FlowFile content.
            if (context.getProperty(ATTACH_FILE).asBoolean()) {
                final MimeBodyPart mimeText = new PreencodedMimeBodyPart("base64");
                mimeText.setDataHandler(new DataHandler(new ByteArrayDataSource(
                        Base64.encodeBase64(messageText.getBytes("UTF-8")), contentType + "; charset=\"utf-8\"")));
                final MimeBodyPart mimeFile = new MimeBodyPart();
                session.read(flowFile, new InputStreamCallback() {
                    @Override
                    public void process(final InputStream stream) throws IOException {
                        try {
                            // ByteArrayDataSource buffers the whole stream in memory.
                            mimeFile.setDataHandler(new DataHandler(new ByteArrayDataSource(stream, "application/octet-stream")));
                        } catch (final Exception e) {
                            throw new IOException(e);
                        }
                    }
                });
                mimeFile.setFileName(MimeUtility.encodeText(flowFile.getAttribute(CoreAttributes.FILENAME.key())));
                MimeMultipart multipart = new MimeMultipart();
                multipart.addBodyPart(mimeText);
                multipart.addBodyPart(mimeFile);
                message.setContent(multipart);
            }
            send(message);
            // Provenance records only the first recipient address.
            session.getProvenanceReporter().send(flowFile, "mailto:" + message.getAllRecipients()[0].toString());
            session.transfer(flowFile, REL_SUCCESS);
            logger.info("Sent email as a result of receiving {}", new Object[]{flowFile});
        } catch (final ProcessException | MessagingException | IOException e) {
            // Yield so a misconfigured/unreachable SMTP server does not spin the processor.
            context.yield();
            logger.error("Failed to send email for {}: {}; routing to failure", new Object[]{flowFile, e.getMessage()}, e);
            session.transfer(flowFile, REL_FAILURE);
        }
    }
private String getMessage(final FlowFile flowFile, final ProcessContext context, final ProcessSession session) {
String messageText = "";
if(context.getProperty(CONTENT_AS_MESSAGE).evaluateAttributeExpressions(flowFile).asBoolean()) {
// reading all the content of the input flow file
final byte[] byteBuffer = new byte[(int) flowFile.getSize()];
session.read(flowFile, new InputStreamCallback() {
@Override
public void process(InputStream in) throws IOException {
StreamUtils.fillBuffer(in, byteBuffer, false);
}
});
messageText = new String(byteBuffer, 0, byteBuffer.length, Charset.forName("UTF-8"));
} else if (context.getProperty(MESSAGE).isSet()) {
messageText = context.getProperty(MESSAGE).evaluateAttributeExpressions(flowFile).getValue();
}
if (context.getProperty(INCLUDE_ALL_ATTRIBUTES).asBoolean()) {
return formatAttributes(flowFile, messageText);
}
return messageText;
}
/**
* Based on the input properties, determine whether an authenticate or unauthenticated session should be used. If authenticated, creates a Password Authenticator for use in sending the email.
*
* @param properties mail properties
* @return session
*/
private Session createMailSession(final Properties properties) {
String authValue = properties.getProperty("mail.smtp.auth");
Boolean auth = Boolean.valueOf(authValue);
/*
* Conditionally create a password authenticator if the 'auth' parameter is set.
*/
final Session mailSession = auth ? Session.getInstance(properties, new Authenticator() {
@Override
public PasswordAuthentication getPasswordAuthentication() {
String username = properties.getProperty("mail.smtp.user"), password = properties.getProperty("mail.smtp.password");
return new PasswordAuthentication(username, password);
}
}) : Session.getInstance(properties); // without auth
return mailSession;
}
/**
* Uses the mapping of javax.mail properties to NiFi PropertyDescriptors to build the required Properties object to be used for sending this email
*
* @param context context
* @param flowFile flowFile
* @return mail properties
*/
private Properties getMailPropertiesFromFlowFile(final ProcessContext context, final FlowFile flowFile) {
final Properties properties = new Properties();
final ComponentLog logger = this.getLogger();
for (Entry<String, PropertyDescriptor> entry : propertyToContext.entrySet()) {
// Evaluate the property descriptor against the flow file
String flowFileValue = context.getProperty(entry.getValue()).evaluateAttributeExpressions(flowFile).getValue();
String property = entry.getKey();
logger.debug("Evaluated Mail Property: {} with Value: {}", new Object[]{property, flowFileValue});
// Nullable values are not allowed, so filter out
if (null != flowFileValue) {
properties.setProperty(property, flowFileValue);
}
}
return properties;
}
public static final String BODY_SEPARATOR = "\n\n--------------------------------------------------\n";
private static String formatAttributes(final FlowFile flowFile, final String messagePrepend) {
StringBuilder message = new StringBuilder(messagePrepend);
message.append(BODY_SEPARATOR);
message.append("\nStandard FlowFile Metadata:");
message.append(String.format("\n\t%1$s = '%2$s'", "id", flowFile.getAttribute(CoreAttributes.UUID.key())));
message.append(String.format("\n\t%1$s = '%2$s'", "entryDate", new Date(flowFile.getEntryDate())));
message.append(String.format("\n\t%1$s = '%2$s'", "fileSize", flowFile.getSize()));
message.append("\nFlowFile Attributes:");
for (Entry<String, String> attribute : flowFile.getAttributes().entrySet()) {
message.append(String.format("\n\t%1$s = '%2$s'", attribute.getKey(), attribute.getValue()));
}
message.append("\n");
return message.toString();
}
/**
* @param context the current context
* @param flowFile the current flow file
* @param propertyDescriptor the property to evaluate
* @return an InternetAddress[] parsed from the supplied property
* @throws AddressException if the property cannot be parsed to a valid InternetAddress[]
*/
private InternetAddress[] toInetAddresses(final ProcessContext context, final FlowFile flowFile,
PropertyDescriptor propertyDescriptor) throws AddressException {
InternetAddress[] parse;
String value = context.getProperty(propertyDescriptor).evaluateAttributeExpressions(flowFile).getValue();
if (value == null || value.isEmpty()){
if (propertyDescriptor.isRequired()) {
final String exceptionMsg = "Required property '" + propertyDescriptor.getDisplayName() + "' evaluates to an empty string.";
throw new AddressException(exceptionMsg);
} else {
parse = new InternetAddress[0];
}
} else {
try {
parse = InternetAddress.parse(value);
} catch (AddressException e) {
final String exceptionMsg = "Unable to parse a valid address for property '" + propertyDescriptor.getDisplayName() + "' with value '"+ value +"'";
throw new AddressException(exceptionMsg);
}
}
return parse;
}
    /**
     * Wrapper for static method {@link Transport#send(Message)} to add testability of this class.
     * Overridden in tests to capture the outgoing message instead of hitting SMTP.
     *
     * @param msg the message to send
     * @throws MessagingException on error
     */
    protected void send(final Message msg) throws MessagingException {
        Transport.send(msg);
    }
}
| |
/*
* Copyright 2015-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.cli.net;
import static org.onosproject.net.DeviceId.deviceId;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableSet;
import org.apache.karaf.shell.commands.Argument;
import org.apache.karaf.shell.commands.Command;
import org.apache.karaf.shell.commands.Option;
import org.onlab.packet.MplsLabel;
import org.onlab.packet.VlanId;
import org.onlab.util.Bandwidth;
import org.onosproject.cli.AbstractShellCommand;
import org.onosproject.net.Device;
import org.onosproject.net.DeviceId;
import org.onosproject.net.OchSignal;
import org.onosproject.net.Port;
import org.onosproject.net.PortNumber;
import org.onosproject.net.TributarySlot;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.intent.IntentId;
import org.onosproject.net.resource.ResourceConsumerId;
import org.onosproject.net.resource.Resources;
import org.onosproject.net.resource.DiscreteResourceId;
import org.onosproject.net.resource.ResourceAllocation;
import org.onosproject.net.resource.ResourceService;
/**
 * Lists allocated resources.
 */
@Command(scope = "onos", name = "allocations",
         description = "Lists allocated resources")
public class AllocationsCommand extends AbstractShellCommand {

    // FIXME: This workaround induces a lot of distributed store access.
    //        ResourceService should have an API to get all allocations under a parent resource.
    /**
     * Resource value types scanned under each port. Hoisted to a constant so the
     * set is not rebuilt on every printAllocation(did, num, level) call.
     */
    private static final Set<Class<?>> SUB_RESOURCE_TYPES = ImmutableSet.<Class<?>>builder()
            .add(OchSignal.class)
            .add(VlanId.class)
            .add(MplsLabel.class)
            .add(Bandwidth.class)
            .add(TributarySlot.class)
            .build();

    @Option(name = "-t", aliases = "--type",
            description = "resource types to include in the list",
            required = false, multiValued = true)
    String[] typeStrings = null;

    // Filter sets derived from the options; empty means "no filtering".
    Set<String> typesToPrint;

    @Option(name = "-i", aliases = "--intentId",
            description = "Intent ID to include in the list",
            required = false, multiValued = true)
    String[] intentStrings;

    Set<String> intentsToPrint;

    @Argument(index = 0, name = "deviceIdString", description = "Device ID",
              required = false, multiValued = false)
    String deviceIdStr = null;

    @Argument(index = 1, name = "portNumberString", description = "PortNumber",
              required = false, multiValued = false)
    String portNumberStr = null;

    private DeviceService deviceService;
    private ResourceService resourceService;

    @Override
    protected void execute() {
        deviceService = get(DeviceService.class);
        resourceService = get(ResourceService.class);

        if (typeStrings != null) {
            typesToPrint = new HashSet<>(Arrays.asList(typeStrings));
        } else {
            typesToPrint = Collections.emptySet();
        }

        if (intentStrings != null) {
            intentsToPrint = new HashSet<>(Arrays.asList(intentStrings));
        } else {
            intentsToPrint = Collections.emptySet();
        }

        // Narrow the listing to a port, a device, or the whole network depending
        // on which positional arguments were supplied.
        if (deviceIdStr != null && portNumberStr != null) {
            DeviceId deviceId = deviceId(deviceIdStr);
            PortNumber portNumber = PortNumber.fromString(portNumberStr);
            printAllocation(deviceId, portNumber, 0);
        } else if (deviceIdStr != null) {
            DeviceId deviceId = deviceId(deviceIdStr);
            printAllocation(deviceId, 0);
        } else {
            printAllocation();
        }
    }

    /** Prints allocations for every available device, rooted at "ROOT". */
    private void printAllocation() {
        print("ROOT");
        StreamSupport.stream(deviceService.getAvailableDevices().spliterator(), false)
                .map(Device::id)
                .forEach(did -> printAllocation(did, 1));
    }

    /** Prints allocations for each port of the given device. */
    private void printAllocation(DeviceId did, int level) {
        print("%s%s", Strings.repeat(" ", level), did);
        StreamSupport.stream(deviceService.getPorts(did).spliterator(), false)
                .map(Port::number)
                .forEach(num -> printAllocation(did, num, level + 1));
    }

    /**
     * Prints the allocations on a single port, followed by allocations of each
     * known sub-resource type beneath it.
     */
    private void printAllocation(DeviceId did, PortNumber num, int level) {
        if (level == 0) {
            // print DeviceId when Port was directly specified.
            print("%s", did);
        }

        DiscreteResourceId resourceId = Resources.discrete(did, num).id();
        List<String> portConsumers = resourceService.getResourceAllocations(resourceId)
                .stream()
                .filter(this::isSubjectToPrint)
                .map(ResourceAllocation::consumerId)
                .map(AllocationsCommand::asVerboseString)
                .collect(Collectors.toList());
        if (portConsumers.isEmpty()) {
            print("%s%s", Strings.repeat(" ", level), asVerboseString(num));
        } else {
            print("%s%s allocated by %s", Strings.repeat(" ", level), asVerboseString(num),
                    portConsumers);
        }

        for (Class<?> t : SUB_RESOURCE_TYPES) {
            resourceService.getResourceAllocations(resourceId, t).stream()
                    .filter(this::isSubjectToPrint)
                    .forEach(a -> print("%s%s allocated by %s", Strings.repeat(" ", level + 1),
                            a.resource().valueAs(Object.class).orElse(""), asVerboseString(a.consumerId())));
        }
    }

    /**
     * Returns true when the allocation passes both the intent-ID and the
     * resource-type filters supplied on the command line.
     */
    private boolean isSubjectToPrint(ResourceAllocation allocation) {
        if (!intentsToPrint.isEmpty()
                && allocation.consumerId().isClassOf(IntentId.class)
                && !intentsToPrint.contains(allocation.consumerId().toString())) {
            return false;
        }
        if (!typesToPrint.isEmpty()
                && !typesToPrint.contains(allocation.resource().simpleTypeName())) {
            return false;
        }
        return true;
    }

    /**
     * Add type name if the toString does not start with them.
     *
     * e.g., IntentId#toString result in "42"
     *      asVerboseString(id) will result in "IntentId:42"
     *
     * @param obj non-null Object to print.
     * @return verbose String representation
     */
    private static String asVerboseString(Object obj) {
        String name = obj.getClass().getSimpleName();
        String toString = String.valueOf(obj);
        if (toString.startsWith(name)) {
            return toString;
        } else {
            return String.format("%s:%s", name, toString);
        }
    }

    /** Verbose form for a consumer id: "ConsumerClass:value". */
    private static String asVerboseString(ResourceConsumerId consumerId) {
        return String.format("%s:%s", consumerId.consumerClass(), consumerId.value());
    }
}
| |
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
* http://creativecommons.org/licenses/publicdomain
* Other contributors include Andrew Wright, Jeffrey Hayes,
* Pat Fisher, Mike Judd.
*/
package com.google.common.collect;
import junit.framework.TestCase;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
/**
* ConcurrentHashMap tests copied from ConcurrentHashMapTest. Useful as a
* test case for CustomConcurrentHashMap.
*/
@SuppressWarnings("unchecked")
public class Jsr166HashMapTest extends TestCase {
    /*
     * The following two methods and constants were copied from JSR166TestCase.
     */

    /**
     * fail with message "should throw exception"
     */
    public void shouldThrow() {
        fail("Should throw exception");
    }
    /**
     * fail with message "Unexpected exception"
     */
    public void unexpectedException() {
        fail("Unexpected exception");
    }
static final Integer zero = new Integer(0);
static final Integer one = new Integer(1);
static final Integer two = new Integer(2);
static final Integer three = new Integer(3);
static final Integer four = new Integer(4);
static final Integer five = new Integer(5);
static final Integer six = new Integer(6);
static final Integer seven = new Integer(7);
static final Integer eight = new Integer(8);
static final Integer nine = new Integer(9);
static final Integer m1 = new Integer(-1);
static final Integer m2 = new Integer(-2);
static final Integer m3 = new Integer(-3);
static final Integer m4 = new Integer(-4);
static final Integer m5 = new Integer(-5);
static final Integer m6 = new Integer(-6);
static final Integer m10 = new Integer(-10);
/**
* Create a map from Integers 1-5 to Strings "A"-"E".
*/
private static Jsr166HashMap map5() {
Jsr166HashMap map = new Jsr166HashMap(5);
assertTrue(map.isEmpty());
map.put(one, "A");
map.put(two, "B");
map.put(three, "C");
map.put(four, "D");
map.put(five, "E");
assertFalse(map.isEmpty());
assertEquals(5, map.size());
return map;
}
/**
* clear removes all pairs
*/
public void testClear() {
Jsr166HashMap map = map5();
map.clear();
assertEquals(map.size(), 0);
}
/**
* Maps with same contents are equal
*/
public void testEquals() {
Jsr166HashMap map1 = map5();
Jsr166HashMap map2 = map5();
assertEquals(map1, map2);
assertEquals(map2, map1);
map1.clear();
assertFalse(map1.equals(map2));
assertFalse(map2.equals(map1));
}
/**
* containsKey returns true for contained key
*/
public void testContainsKey() {
Jsr166HashMap map = map5();
assertTrue(map.containsKey(one));
assertFalse(map.containsKey(zero));
}
/**
* containsValue returns true for held values
*/
public void testContainsValue() {
Jsr166HashMap map = map5();
assertTrue(map.containsValue("A"));
assertFalse(map.containsValue("Z"));
}
/**
* get returns the correct element at the given key, or null if not present
*/
public void testGet() {
Jsr166HashMap map = map5();
assertEquals("A", (String) map.get(one));
Jsr166HashMap empty = new Jsr166HashMap();
assertNull(map.get("anything"));
}
/**
* isEmpty is true of empty map and false for non-empty
*/
public void testIsEmpty() {
Jsr166HashMap empty = new Jsr166HashMap();
Jsr166HashMap map = map5();
assertTrue(empty.isEmpty());
assertFalse(map.isEmpty());
}
/**
* keySet returns a Set containing all the keys
*/
public void testKeySet() {
Jsr166HashMap map = map5();
Set s = map.keySet();
assertEquals(5, s.size());
assertTrue(s.contains(one));
assertTrue(s.contains(two));
assertTrue(s.contains(three));
assertTrue(s.contains(four));
assertTrue(s.contains(five));
}
/**
* keySet.toArray returns contains all keys
*/
public void testKeySetToArray() {
Jsr166HashMap map = map5();
Set s = map.keySet();
Object[] ar = s.toArray();
assertTrue(s.containsAll(Arrays.asList(ar)));
assertEquals(5, ar.length);
ar[0] = m10;
assertFalse(s.containsAll(Arrays.asList(ar)));
}
/**
* Values.toArray contains all values
*/
public void testValuesToArray() {
Jsr166HashMap map = map5();
Collection v = map.values();
Object[] ar = v.toArray();
ArrayList s = new ArrayList(Arrays.asList(ar));
assertEquals(5, ar.length);
assertTrue(s.contains("A"));
assertTrue(s.contains("B"));
assertTrue(s.contains("C"));
assertTrue(s.contains("D"));
assertTrue(s.contains("E"));
}
/**
* entrySet.toArray contains all entries
*/
public void testEntrySetToArray() {
Jsr166HashMap map = map5();
Set s = map.entrySet();
Object[] ar = s.toArray();
assertEquals(5, ar.length);
for (int i = 0; i < 5; ++i) {
assertTrue(map.containsKey(((Map.Entry) (ar[i])).getKey()));
assertTrue(map.containsValue(((Map.Entry) (ar[i])).getValue()));
}
}
/**
* values collection contains all values
*/
public void testValues() {
Jsr166HashMap map = map5();
Collection s = map.values();
assertEquals(5, s.size());
assertTrue(s.contains("A"));
assertTrue(s.contains("B"));
assertTrue(s.contains("C"));
assertTrue(s.contains("D"));
assertTrue(s.contains("E"));
}
/**
* entrySet contains all pairs
*/
public void testEntrySet() {
Jsr166HashMap map = map5();
Set s = map.entrySet();
assertEquals(5, s.size());
Iterator it = s.iterator();
while (it.hasNext()) {
Map.Entry e = (Map.Entry) it.next();
assertTrue(
(e.getKey().equals(one) && e.getValue().equals("A")) ||
(e.getKey().equals(two) && e.getValue().equals("B"))
||
(e.getKey().equals(three) && e.getValue()
.equals("C")) ||
(e.getKey().equals(four) && e.getValue()
.equals("D")) ||
(e.getKey().equals(five) && e.getValue()
.equals("E")));
}
}
/**
* putAll adds all key-value pairs from the given map
*/
public void testPutAll() {
Jsr166HashMap empty = new Jsr166HashMap();
Jsr166HashMap map = map5();
empty.putAll(map);
assertEquals(5, empty.size());
assertTrue(empty.containsKey(one));
assertTrue(empty.containsKey(two));
assertTrue(empty.containsKey(three));
assertTrue(empty.containsKey(four));
assertTrue(empty.containsKey(five));
}
/**
* putIfAbsent works when the given key is not present
*/
public void testPutIfAbsent() {
Jsr166HashMap map = map5();
map.putIfAbsent(six, "Z");
assertTrue(map.containsKey(six));
}
/**
* putIfAbsent does not add the pair if the key is already present
*/
public void testPutIfAbsent2() {
Jsr166HashMap map = map5();
assertEquals("A", map.putIfAbsent(one, "Z"));
}
/**
* replace fails when the given key is not present
*/
public void testReplace() {
Jsr166HashMap map = map5();
assertNull(map.replace(six, "Z"));
assertFalse(map.containsKey(six));
}
/**
* replace succeeds if the key is already present
*/
public void testReplace2() {
Jsr166HashMap map = map5();
assertNotNull(map.replace(one, "Z"));
assertEquals("Z", map.get(one));
}
/**
* replace value fails when the given key not mapped to expected value
*/
public void testReplaceValue() {
Jsr166HashMap map = map5();
assertEquals("A", map.get(one));
assertFalse(map.replace(one, "Z", "Z"));
assertEquals("A", map.get(one));
}
/**
* replace value succeeds when the given key mapped to expected value
*/
public void testReplaceValue2() {
Jsr166HashMap map = map5();
assertEquals("A", map.get(one));
assertTrue(map.replace(one, "A", "Z"));
assertEquals("Z", map.get(one));
}
/**
* remove removes the correct key-value pair from the map
*/
public void testRemove() {
Jsr166HashMap map = map5();
map.remove(five);
assertEquals(4, map.size());
assertFalse(map.containsKey(five));
}
/**
* remove(key,value) removes only if pair present
*/
public void testRemove2() {
Jsr166HashMap map = map5();
map.remove(five, "E");
assertEquals(4, map.size());
assertFalse(map.containsKey(five));
map.remove(four, "A");
assertEquals(4, map.size());
assertTrue(map.containsKey(four));
}
/**
* size returns the correct values
*/
public void testSize() {
Jsr166HashMap map = map5();
Jsr166HashMap empty = new Jsr166HashMap();
assertEquals(0, empty.size());
assertEquals(5, map.size());
}
/**
* toString contains toString of elements
*/
public void testToString() {
Jsr166HashMap map = map5();
String s = map.toString();
for (int i = 1; i <= 5; ++i) {
assertTrue(s.indexOf(String.valueOf(i)) >= 0);
}
}
    // Exception tests

    /**
     * Cannot create with negative capacity
     */
    public void testConstructor1() {
        try {
            new Jsr166HashMap(-1, 0, 1);
            shouldThrow();
        } catch (IllegalArgumentException e) {
            // expected: negative capacity is rejected
        }
    }
    /**
     * Cannot create with negative concurrency level
     */
    public void testConstructor2() {
        try {
            new Jsr166HashMap(1, 0, -1);
            shouldThrow();
        } catch (IllegalArgumentException e) {
            // expected: negative concurrency level is rejected
        }
    }
    /**
     * Cannot create with only negative capacity
     */
    public void testConstructor3() {
        try {
            new Jsr166HashMap(-1);
            shouldThrow();
        } catch (IllegalArgumentException e) {
            // expected: negative capacity is rejected
        }
    }
    /**
     * get(null) throws NPE
     */
    public void testGet_NullPointerException() {
        try {
            Jsr166HashMap c = new Jsr166HashMap(5);
            c.get(null);
            shouldThrow();
        } catch (NullPointerException e) {
            // expected: null keys are not permitted
        }
    }
    /**
     * containsKey(null) throws NPE
     */
    public void testContainsKey_NullPointerException() {
        try {
            Jsr166HashMap c = new Jsr166HashMap(5);
            c.containsKey(null);
            shouldThrow();
        } catch (NullPointerException e) {
            // expected: null keys are not permitted
        }
    }
    /**
     * containsValue(null) throws NPE
     */
    public void testContainsValue_NullPointerException() {
        try {
            Jsr166HashMap c = new Jsr166HashMap(5);
            c.containsValue(null);
            shouldThrow();
        } catch (NullPointerException e) {
            // expected: null values are not permitted
        }
    }
    /**
     * put(null,x) throws NPE
     */
    public void testPut1_NullPointerException() {
        try {
            Jsr166HashMap c = new Jsr166HashMap(5);
            c.put(null, "whatever");
            shouldThrow();
        } catch (NullPointerException e) {
            // expected: null keys are not permitted
        }
    }
    /**
     * put(x, null) throws NPE
     */
    public void testPut2_NullPointerException() {
        try {
            Jsr166HashMap c = new Jsr166HashMap(5);
            c.put("whatever", null);
            shouldThrow();
        } catch (NullPointerException e) {
            // expected: null values are not permitted
        }
    }
    /**
     * putIfAbsent(null, x) throws NPE
     */
    public void testPutIfAbsent1_NullPointerException() {
        try {
            Jsr166HashMap c = new Jsr166HashMap(5);
            c.putIfAbsent(null, "whatever");
            shouldThrow();
        } catch (NullPointerException e) {
            // expected: null keys are not permitted
        }
    }
    /**
     * replace(null, x) throws NPE
     */
    public void testReplace_NullPointerException() {
        try {
            Jsr166HashMap c = new Jsr166HashMap(5);
            c.replace(null, "whatever");
            shouldThrow();
        } catch (NullPointerException e) {
            // expected: null keys are not permitted
        }
    }
    /**
     * replace(null, x, y) throws NPE
     */
    public void testReplaceValue_NullPointerException() {
        try {
            Jsr166HashMap c = new Jsr166HashMap(5);
            c.replace(null, one, "whatever");
            shouldThrow();
        } catch (NullPointerException e) {
            // expected: null keys are not permitted
        }
    }
    /**
     * putIfAbsent(x, null) throws NPE
     */
    public void testPutIfAbsent2_NullPointerException() {
        try {
            Jsr166HashMap c = new Jsr166HashMap(5);
            c.putIfAbsent("whatever", null);
            shouldThrow();
        } catch (NullPointerException e) {
            // expected: null values are not permitted
        }
    }
    /**
     * replace(x, null) throws NPE
     */
    public void testReplace2_NullPointerException() {
        try {
            Jsr166HashMap c = new Jsr166HashMap(5);
            c.replace("whatever", null);
            shouldThrow();
        } catch (NullPointerException e) {
            // expected: null values are not permitted
        }
    }
    /**
     * replace(x, null, y) throws NPE
     */
    public void testReplaceValue2_NullPointerException() {
        try {
            Jsr166HashMap c = new Jsr166HashMap(5);
            c.replace("whatever", null, "A");
            shouldThrow();
        } catch (NullPointerException e) {
            // expected: null expected-values are not permitted
        }
    }
/**
* replace(x, y, null) throws NPE
*/
public void testReplaceValue3_NullPointerException() {
try {
Jsr166HashMap c = new Jsr166HashMap(5);
c.replace("whatever", one, null);
shouldThrow();
} catch (NullPointerException e) {
}
}
/**
* remove(null) throws NPE
*/
public void testRemove1_NullPointerException() {
try {
Jsr166HashMap c = new Jsr166HashMap(5);
c.put("sadsdf", "asdads");
c.remove(null);
shouldThrow();
} catch (NullPointerException e) {
}
}
/**
* remove(null, x) throws NPE
*/
public void testRemove2_NullPointerException() {
try {
Jsr166HashMap c = new Jsr166HashMap(5);
c.put("sadsdf", "asdads");
c.remove(null, "whatever");
shouldThrow();
} catch (NullPointerException e) {
}
}
/**
* remove(x, null) returns false
*/
public void testRemove3() {
try {
Jsr166HashMap c = new Jsr166HashMap(5);
c.put("sadsdf", "asdads");
assertFalse(c.remove("sadsdf", null));
} catch (NullPointerException e) {
fail();
}
}
/**
* A deserialized map equals original
*/
// TODO: why not use SerializableTester?
public void testSerialization() {
Jsr166HashMap q = map5();
try {
ByteArrayOutputStream bout = new ByteArrayOutputStream(10000);
ObjectOutputStream out =
new ObjectOutputStream(new BufferedOutputStream(bout));
out.writeObject(q);
out.close();
ByteArrayInputStream bin =
new ByteArrayInputStream(bout.toByteArray());
ObjectInputStream in =
new ObjectInputStream(new BufferedInputStream(bin));
Jsr166HashMap r = (Jsr166HashMap) in.readObject();
assertEquals(q.size(), r.size());
assertTrue(q.equals(r));
assertTrue(r.equals(q));
} catch (Exception e) {
e.printStackTrace();
unexpectedException();
}
}
/**
* SetValue of an EntrySet entry sets value in the map.
*/
public void testSetValueWriteThrough() {
// Adapted from a bug report by Eric Zoerner
Jsr166HashMap map = new Jsr166HashMap(2, 5.0f, 1);
assertTrue(map.isEmpty());
for (int i = 0; i < 20; i++) {
map.put(new Integer(i), new Integer(i));
}
assertFalse(map.isEmpty());
Map.Entry entry1 = (Map.Entry) map.entrySet().iterator().next();
// assert that entry1 is not 16
assertTrue("entry is 16, test not valid",
!entry1.getKey().equals(new Integer(16)));
// remove 16 (a different key) from map
// which just happens to cause entry1 to be cloned in map
map.remove(new Integer(16));
entry1.setValue("XYZ");
assertTrue(map.containsValue("XYZ")); // fails
}
}
| |
/*
*
* Paros and its related class files.
*
* Paros is an HTTP/HTTPS proxy for assessing web application security.
* Copyright (C) 2003-2004 Chinotec Technologies Company
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the Clarified Artistic License
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* Clarified Artistic License for more details.
*
* You should have received a copy of the Clarified Artistic License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
// ZAP: 2011/05/15 Support for exclusions
// ZAP: 2011/10/29 Support for parameters
// ZAP: 2012/03/15 Changed the parameter's type of the method
// removeDatabaseListener to DatabaseListener instead of SpiderListener. Removed
// unnecessary cast in the method notifyListenerDatabaseOpen.
// ZAP: 2012/05/02 Added the method createSingleton and changed the method
// getSingleton to use it.
// ZAP: 2012/06/11 Added JavaDoc to the method close(boolean), changed the
// method close(boolean) to call the method close(boolean, boolean), added
// method close(boolean, boolean).
// ZAP: 2012/07/16 Removed unused setters.
// ZAP: 2012/10/02 Issue 385: Added support for Contexts
// ZAP: 2013/03/03 Issue 546: Remove all template Javadoc comments
// ZAP: 2015/02/05 Issue 1524: New Persist Session dialog
// ZAP: 2015/02/09 Issue 1525: Introduce a database interface layer to allow for alternative implementations
// ZAP: 2015/04/02 Issue 1582: Low memory option
package org.parosproxy.paros.db.paros;
import java.io.File;
import java.sql.SQLException;
import java.util.Vector;
import org.apache.log4j.Logger;
import org.parosproxy.paros.db.Database;
import org.parosproxy.paros.db.DatabaseException;
import org.parosproxy.paros.db.DatabaseListener;
import org.parosproxy.paros.db.DatabaseServer;
import org.parosproxy.paros.db.DatabaseUnsupportedException;
import org.parosproxy.paros.db.TableAlert;
import org.parosproxy.paros.db.TableContext;
import org.parosproxy.paros.db.TableHistory;
import org.parosproxy.paros.db.TableParam;
import org.parosproxy.paros.db.TableScan;
import org.parosproxy.paros.db.TableSession;
import org.parosproxy.paros.db.TableSessionUrl;
import org.parosproxy.paros.db.TableStructure;
import org.parosproxy.paros.db.TableTag;
public class ParosDatabase implements Database {
private ParosDatabaseServer databaseServer = null;
private TableHistory tableHistory = null;
private TableSession tableSession = null;
private TableAlert tableAlert = null;
private TableScan tableScan = null;
// ZAP: Added TableTag
private TableTag tableTag = null;
// ZAP: Added TableSessionUrl.
private TableSessionUrl tableSessionUrl = null;
// ZAP: Added TableParam.
private TableParam tableParam = null;
private TableContext tableContext = null;
private TableStructure tableStructure = null;
// ZAP: Added Logger.
private static final Logger log = Logger.getLogger(ParosDatabase.class);
// ZAP: Added type arguments.
private Vector<DatabaseListener> listenerList = new Vector<>();
public ParosDatabase() {
tableHistory = new ParosTableHistory();
tableSession = new ParosTableSession();
tableAlert = new ParosTableAlert();
tableScan = new ParosTableScan();
// ZAP: Added statement.
tableTag = new ParosTableTag();
// ZAP: Added statement.
tableSessionUrl = new ParosTableSessionUrl();
// ZAP: Added statement.
tableParam = new ParosTableParam();
tableContext = new ParosTableContext();
tableStructure = new ParosTableStructure();
addDatabaseListener(tableHistory);
addDatabaseListener(tableSession);
addDatabaseListener(tableAlert);
addDatabaseListener(tableScan);
// ZAP: Added statement.
addDatabaseListener(tableTag);
// ZAP: Added statement.
addDatabaseListener(tableSessionUrl);
// ZAP: Added statement.
addDatabaseListener(tableParam);
addDatabaseListener(tableContext);
addDatabaseListener(tableStructure);
}
/**
* @return Returns the databaseServer
*/
@Override
public DatabaseServer getDatabaseServer() {
return databaseServer;
}
/**
* @param databaseServer The databaseServer to set.
*/
private void setDatabaseServer(ParosDatabaseServer databaseServer) {
this.databaseServer = databaseServer;
}
/* (non-Javadoc)
* @see org.parosproxy.paros.db.DatabaseIF#getTableHistory()
*/
@Override
public TableHistory getTableHistory() {
return tableHistory;
}
/* (non-Javadoc)
* @see org.parosproxy.paros.db.DatabaseIF#getTableSession()
*/
@Override
public TableSession getTableSession() {
return tableSession;
}
/* (non-Javadoc)
* @see org.parosproxy.paros.db.DatabaseIF#addDatabaseListener(org.parosproxy.paros.db.DatabaseListener)
*/
@Override
public void addDatabaseListener(DatabaseListener listener) {
listenerList.add(listener);
}
// ZAP: Changed parameter's type from SpiderListener to DatabaseListener.
/* (non-Javadoc)
* @see org.parosproxy.paros.db.DatabaseIF#removeDatabaseListener(org.parosproxy.paros.db.DatabaseListener)
*/
@Override
public void removeDatabaseListener(DatabaseListener listener) {
listenerList.remove(listener);
}
private void notifyListenerDatabaseOpen() throws DatabaseException {
DatabaseListener listener = null;
for (int i=0;i<listenerList.size();i++) {
// ZAP: Removed unnecessary cast.
listener = listenerList.get(i);
try {
listener.databaseOpen(getDatabaseServer());
} catch (DatabaseUnsupportedException e) {
log.error(e.getMessage(), e);
}
}
}
/* (non-Javadoc)
* @see org.parosproxy.paros.db.DatabaseIF#open(java.lang.String)
*/
@Override
public void open(String path) throws ClassNotFoundException, Exception {
// ZAP: Added log statement.
log.debug("open " + path);
setDatabaseServer(new ParosDatabaseServer(path));
notifyListenerDatabaseOpen();
}
/* (non-Javadoc)
* @see org.parosproxy.paros.db.DatabaseIF#close(boolean)
*/
// ZAP: Added JavaDoc.
@Override
public void close(boolean compact) {
// ZAP: Moved the content of this method to the method close(boolean,
// boolean) and changed to call that method instead.
close(compact, true);
}
/* (non-Javadoc)
* @see org.parosproxy.paros.db.DatabaseIF#deleteSession(java.lang.String)
*/
@Override
public void deleteSession(String sessionName) {
log.debug("deleteSession " + sessionName);
if (databaseServer == null) {
return;
}
try {
databaseServer.shutdown(false);
} catch (SQLException e) {
log.error(e.getMessage(), e);
}
deleteDbFile(new File(sessionName));
deleteDbFile(new File(sessionName + ".data"));
deleteDbFile(new File(sessionName + ".script"));
deleteDbFile(new File(sessionName + ".properties"));
deleteDbFile(new File(sessionName + ".backup"));
deleteDbFile(new File(sessionName + ".lobs"));
databaseServer = null;
}
private void deleteDbFile (File file) {
log.debug("Deleting " + file.getAbsolutePath());
if (file.exists()) {
if (! file.delete()) {
log.error("Failed to delete " + file.getAbsolutePath());
}
}
}
/* (non-Javadoc)
* @see org.parosproxy.paros.db.DatabaseIF#close(boolean, boolean)
*/
// ZAP: Added method. Note: any change made to this method must have the
// ZAP comment as the content was moved from the paros method close(boolean).
@Override
public void close(boolean compact, boolean cleanup) {
// ZAP: Added statement.
log.debug("close");
if (databaseServer == null) return;
try {
// ZAP: Added if block.
if (cleanup) {
// perform clean up
getTableHistory().deleteTemporary();
}
// shutdown
databaseServer.shutdown(compact);
// ZAP: Changed to catch SQLException instead of Exception.
} catch (Exception e) {
// ZAP: Changed to log the exception.
log.error(e.getMessage(), e);
}
}
/* (non-Javadoc)
* @see org.parosproxy.paros.db.DatabaseIF#getTableAlert()
*/
@Override
public TableAlert getTableAlert() {
return tableAlert;
}
/* (non-Javadoc)
* @see org.parosproxy.paros.db.DatabaseIF#setTableAlert(org.parosproxy.paros.db.TableAlert)
*/
@Override
public void setTableAlert(TableAlert tableAlert) {
this.tableAlert = tableAlert;
}
/* (non-Javadoc)
* @see org.parosproxy.paros.db.DatabaseIF#getTableScan()
*/
@Override
public TableScan getTableScan() {
return tableScan;
}
/* (non-Javadoc)
* @see org.parosproxy.paros.db.DatabaseIF#setTableScan(org.parosproxy.paros.db.TableScan)
*/
@Override
public void setTableScan(TableScan tableScan) {
this.tableScan = tableScan;
}
/* (non-Javadoc)
* @see org.parosproxy.paros.db.DatabaseIF#getTableTag()
*/
@Override
public TableTag getTableTag() {
return tableTag;
}
/* (non-Javadoc)
* @see org.parosproxy.paros.db.DatabaseIF#setTableTag(org.parosproxy.paros.db.TableTag)
*/
@Override
public void setTableTag(TableTag tableTag) {
this.tableTag = tableTag;
}
// ZAP: Added method.
/* (non-Javadoc)
* @see org.parosproxy.paros.db.DatabaseIF#getTableSessionUrl()
*/
@Override
public TableSessionUrl getTableSessionUrl() {
return tableSessionUrl;
}
// ZAP: Added method.
/* (non-Javadoc)
* @see org.parosproxy.paros.db.DatabaseIF#setTableSessionUrl(org.parosproxy.paros.db.TableSessionUrl)
*/
@Override
public void setTableSessionUrl(TableSessionUrl tableSessionUrl) {
this.tableSessionUrl = tableSessionUrl;
}
// ZAP: Added method.
/* (non-Javadoc)
* @see org.parosproxy.paros.db.DatabaseIF#getTableParam()
*/
@Override
public TableParam getTableParam() {
return tableParam;
}
/* (non-Javadoc)
* @see org.parosproxy.paros.db.DatabaseIF#getTableContext()
*/
@Override
public TableContext getTableContext() {
return tableContext;
}
@Override
public TableStructure getTableStructure() {
return tableStructure;
}
@Override
public String getType() {
return Database.DB_TYPE_HSQLDB;
}
}
| |
package com.bdc.ociney.activity;
import android.animation.AnimatorSet;
import android.animation.ObjectAnimator;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentStatePagerAdapter;
import android.support.v4.view.ViewPager;
import android.support.v7.app.ActionBarActivity;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.view.animation.RotateAnimation;
import android.widget.Toast;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.bdc.ociney.R;
import com.bdc.ociney.database.AccessBaseFavoris;
import com.bdc.ociney.database.BaseFavoris;
import com.bdc.ociney.fragment.TheaterMovieFragment;
import com.bdc.ociney.modele.Movie.Movie;
import com.bdc.ociney.modele.Theater.Theater;
import com.bdc.ociney.task.LoadTheaterMoviesTask;
import com.bdc.ociney.view.CellTheater;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
public class TheaterMovieActivity extends ActionBarActivity implements LoadTheaterMoviesTask.LoadTheaterMoviesTaskCallBack {
//public static Theater theaterStatic;
public static final String THEATER = "theater";
List<Movie> films = new ArrayList<Movie>();
ViewPager viewPager;
FragmentStatePagerAdapter adapter;
ViewGroup theater_view;
Menu menu;
boolean tournerRoulette;
private Theater theater;
Toolbar toolbar;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.fragment_list_films_theater);
toolbar = (Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
getSupportActionBar().setDisplayShowTitleEnabled(true);
getSupportActionBar().setHomeButtonEnabled(true);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
toolbar.setTitleTextColor(Color.WHITE);
getSupportActionBar().setHomeAsUpIndicator(getResources().getDrawable(R.drawable.ab_back_mtrl_am_alpha));
try {
Intent intent = getIntent();
String jsonTheater = intent.getStringExtra(THEATER);
Gson gSon = new Gson();
theater = gSon.fromJson(jsonTheater, new TypeToken<Theater>() {
}.getType());
charger();
remplir();
getSupportActionBar().setTitle(theater.getName());
tournerRoulette(true);
new LoadTheaterMoviesTask(this).execute(theater);
viewPager.setAlpha(0f);
viewPager.setPageMargin((int) getResources().getDimensionPixelSize(R.dimen.theater_viewpager_negative_margin));
viewPager.setHorizontalFadingEdgeEnabled(true);
viewPager.setFadingEdgeLength(30);
} catch (Exception e) {
e.printStackTrace();
finish();
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
this.menu = menu;
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.threater_menu, menu);
chargerFavoris();
return true;
}
private void chargerFavoris() {
if (theater != null && theater.getCode() != null) {
String code = theater.getCode();
AccessBaseFavoris db = new AccessBaseFavoris(this);
db.open();
if (db.isFavoris(code + "", BaseFavoris.TYPE_FAVORIS_CINEMA)) {
menu.getItem(0).setIcon(R.drawable.ic_favoris_on);
} else {
menu.getItem(0).setIcon(R.drawable.ic_favoris_off);
}
db.close();
}
}
@Override
public boolean onOptionsItemSelected(android.view.MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
super.onBackPressed();
return true;
case R.id.menu_favoris:
AccessBaseFavoris db = new AccessBaseFavoris(this);
db.open();
if (theater != null && theater.getCode() != null) {
String code = theater.getCode();
if (db.isFavoris(code, BaseFavoris.TYPE_FAVORIS_CINEMA)) {
db.deleteFavoris(code, BaseFavoris.TYPE_FAVORIS_CINEMA);
//Toast.makeText(this,"On supprime",Toast.LENGTH_SHORT).show();
} else {
db.insertFavoris(code, BaseFavoris.TYPE_FAVORIS_CINEMA);
//Toast.makeText(this,"On insere",Toast.LENGTH_SHORT).show();
}
}
db.close();
try {
invalidateOptionsMenu();
} catch (Exception e) {
e.printStackTrace();
}
return true;
}
return false;
}
public void charger() {
this.viewPager = (ViewPager) findViewById(R.id.poster_pager);
this.theater_view = (ViewGroup) findViewById(R.id.theater_view);
}
private void remplir() {
View cellTheater = View.inflate(this, R.layout.cell_theater, null);
CellTheater cell = new CellTheater();
cell.construire(this, cellTheater);
cell.construire(theater, 0);
this.theater_view.removeAllViews();
this.theater_view.addView(cellTheater);
cell.afficherEnEntier();
adapter = new FragmentStatePagerAdapter(this.getSupportFragmentManager()) {
HashMap<Integer, Fragment> fragments = new HashMap<Integer, Fragment>();
@Override
public Fragment getItem(int position) {
if (fragments.containsKey(position))
return fragments.get(position);
else {
if (position + 1 < getCount())
getItem(position + 1);//preload
Fragment f = TheaterMovieFragment.newInstance(films.get(position), theater);
fragments.put(position, f);
return f;
}
}
@Override
public void destroyItem(ViewGroup container, int position, Object object) {
//super.destroyItem(container, position, object);
}
@Override
public int getCount() {
return films.size();
}
};
this.viewPager.setAdapter(adapter);
}
@Override
public void onLoadTheaterMoviesTaskCallBack(List<Movie> movies) {
films.clear();
films.addAll(movies);
adapter.notifyDataSetChanged();
tournerRoulette(false);
AnimatorSet anim = new AnimatorSet().setDuration(500);
anim.playTogether(
ObjectAnimator.ofFloat(viewPager, "alpha", 0f, 1f),
ObjectAnimator.ofFloat(viewPager, "translationX", 500f, 0f)
);
anim.start();
}
protected void tournerRoulette(boolean tourner) {
tournerRoulette(tourner, R.id.placeholder_image_loader);
}
protected void tournerRoulette(boolean tourner, int id) {
final View roulette = findViewById(id);
if (roulette != null) {
if (tourner) {
roulette.setVisibility(View.VISIBLE);
int previousDegrees = 0;
int degrees = 360;
final RotateAnimation animation = new RotateAnimation(previousDegrees, degrees,
Animation.RELATIVE_TO_SELF, 0.5f, Animation.RELATIVE_TO_SELF, 0.5f);
animation.setFillEnabled(true);
animation.setFillAfter(true);
animation.setDuration(1500);//Set the duration of the animation to 1 sec.
animation.setAnimationListener(new Animation.AnimationListener() {
@Override
public void onAnimationStart(Animation animation) {
}
@Override
public void onAnimationEnd(Animation animation) {
if (tournerRoulette) {
roulette.startAnimation(animation);
} else
roulette.animate().alpha(0).start();
}
@Override
public void onAnimationRepeat(Animation animation) {
}
});
roulette.startAnimation(animation);
} else {
tournerRoulette = false;
}
}
}
@Override
public void onErreurReseau() {
Toast.makeText(getApplicationContext(), R.string.erreur_reseau, Toast.LENGTH_SHORT).show();
}
}
| |
/*
* Copyright (c) 2004-2005 Ant-Contrib project. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sf.antcontrib.design;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.zip.ZipEntry;
import org.apache.bcel.Constants;
import org.apache.bcel.classfile.ClassFormatException;
import org.apache.bcel.classfile.ClassParser;
import org.apache.bcel.classfile.Constant;
import org.apache.bcel.classfile.ConstantClass;
import org.apache.bcel.classfile.ConstantPool;
import org.apache.bcel.classfile.ConstantUtf8;
import org.apache.bcel.classfile.DescendingVisitor;
import org.apache.bcel.classfile.JavaClass;
import org.apache.bcel.classfile.Utility;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.DirectoryScanner;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.types.FileSet;
import org.apache.tools.ant.types.Path;
import org.apache.tools.ant.types.PatternSet;
import org.apache.tools.ant.util.JAXPUtils;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.XMLReader;
/**
* @author <a href="mailto:dean@xsoftware.biz">Dean Hiller</a>
*/
public class VerifyDesignDelegate implements Log {
/**
* Field designFile.
*/
private File designFile;
/**
* Field paths.
*/
private final List<Path> paths = new ArrayList<Path>();
/**
* Field isCircularDesign.
*/
private boolean isCircularDesign = false;
/**
* Field deleteFiles.
*/
private boolean deleteFiles = false;
/**
* Field fillInBuildException.
*/
private boolean fillInBuildException = false;
/**
* Field needDeclarationsDefault.
*/
private boolean needDeclarationsDefault = true;
/**
* Field needDependsDefault.
*/
private boolean needDependsDefault = true;
/**
* Field task.
*/
private final Task task;
/**
* Field design.
*/
private Design design;
/**
* Field primitives.
*/
private final Set<String> primitives = new HashSet<String>();
/**
* Field designErrors.
*/
private final List<BuildException> designErrors = new ArrayList<BuildException>();
/**
* Field verifiedAtLeastOne.
*/
private boolean verifiedAtLeastOne = false;
/**
* Constructor for VerifyDesignDelegate.
*
* @param task Task
*/
public VerifyDesignDelegate(Task task) {
this.task = task;
primitives.add("B");
primitives.add("C");
primitives.add("D");
primitives.add("F");
primitives.add("I");
primitives.add("J");
primitives.add("S");
primitives.add("Z");
}
/**
* Method addConfiguredPath.
*
* @param path Path
*/
public void addConfiguredPath(Path path) {
paths.add(path);
}
/**
* Method setJar.
*
* @param f File
*/
public void setJar(File f) {
Path p = (Path) task.getProject().createDataType("path");
p.createPathElement().setLocation(f.getAbsoluteFile());
addConfiguredPath(p);
}
/**
* Method setDesign.
*
* @param f File
*/
public void setDesign(File f) {
this.designFile = f;
}
/**
* Method setCircularDesign.
*
* @param isCircularDesign boolean
*/
public void setCircularDesign(boolean isCircularDesign) {
this.isCircularDesign = isCircularDesign;
}
/**
* Method setDeleteFiles.
*
* @param deleteFiles boolean
*/
public void setDeleteFiles(boolean deleteFiles) {
this.deleteFiles = deleteFiles;
}
/**
* Method setFillInBuildException.
*
* @param b boolean
*/
public void setFillInBuildException(boolean b) {
fillInBuildException = b;
}
/**
* Method setNeedDeclarationsDefault.
*
* @param b boolean
*/
public void setNeedDeclarationsDefault(boolean b) {
needDeclarationsDefault = b;
}
/**
* Method setNeedDependsDefault.
*
* @param b boolean
*/
public void setNeedDependsDefault(boolean b) {
needDependsDefault = b;
}
/**
* Method execute.
*
* @throws BuildException if parsing of design file fails
*/
public void execute() throws BuildException {
if (!designFile.exists() || designFile.isDirectory()) {
throw new BuildException("design attribute in verifydesign element specified an invalid file="
+ designFile);
}
verifyJarFilesExist();
try {
XMLReader reader = JAXPUtils.getXMLReader();
DesignFileHandler ch = new DesignFileHandler(this, designFile,
isCircularDesign, task.getLocation());
ch.setNeedDeclarationsDefault(needDeclarationsDefault);
ch.setNeedDependsDefault(needDependsDefault);
reader.setContentHandler(ch);
//reader.setEntityResolver(ch);
//reader.setErrorHandler(ch);
//reader.setDTDHandler(ch);
log("about to start parsing file='" + designFile + "'", Project.MSG_INFO);
FileInputStream fileInput = new FileInputStream(designFile);
InputSource src = new InputSource(fileInput);
reader.parse(src);
design = ch.getDesign();
for (Path p : paths) {
verifyPathAdheresToDesign(design, p);
}
//only put unused errors if there are no other errors
//this is because you end up with false unused errors if you don't do this.
if (designErrors.isEmpty()) {
design.fillInUnusedPackages(designErrors);
}
if (!designErrors.isEmpty()) {
log(designErrors.size() + " errors.", Project.MSG_WARN);
if (!fillInBuildException) {
throw new BuildException("Design check failed due to previous errors");
}
throwAllErrors();
}
} catch (SAXException e) {
maybeDeleteFiles();
if (e.getException() != null
&& e.getException() instanceof RuntimeException) {
throw (RuntimeException) e.getException();
} else if (e instanceof SAXParseException) {
SAXParseException pe = (SAXParseException) e;
throw new BuildException("\nProblem parsing design file='"
+ designFile + "'. \nline=" + pe.getLineNumber()
+ " column=" + pe.getColumnNumber() + " Reason:\n"
+ e.getMessage() + "\n", e);
}
throw new BuildException("\nProblem parsing design file='"
+ designFile + "'. Reason:\n" + e, e);
} catch (IOException e) {
maybeDeleteFiles();
throw new RuntimeException("See attached exception", e);
// throw new BuildException("IOException on design file='"
// + designFile + "'. attached:", e);
} catch (RuntimeException e) {
maybeDeleteFiles();
throw e;
} finally {
}
if (!verifiedAtLeastOne) {
throw new BuildException("Did not find any class or jar files to verify");
}
}
//some auto builds like cruisecontrol can only report all the
//standard ant task errors and the build exceptions so here
//we need to fill in the buildexception so the errors are reported
//correctly through those tools....though, you think ant has a hook
//in that cruisecontrol is not using like LogListeners or something
/**
* Method throwAllErrors.
*/
private void throwAllErrors() {
StringBuilder result = new StringBuilder("Design check failed due to following errors");
for (BuildException be : designErrors) {
result.append("\n").append(be.getMessage());
}
throw new BuildException(result.toString());
}
/**
* Method verifyJarFilesExist.
*/
private void verifyJarFilesExist() {
for (Path p : paths) {
for (String fileName : p.list()) {
File file = new File(fileName);
if (!file.exists()) {
throw new BuildException(VisitorImpl.getNoFileMsg(file));
}
}
}
}
/**
* Method maybeDeleteFiles.
*/
private void maybeDeleteFiles() {
if (deleteFiles) {
log("Deleting all class and jar files so you do not get tempted to\n"
+ "use a jar that doesn't abide by the design (This option can\n"
+ "be turned off if you really want)", Project.MSG_INFO);
for (Path p : paths) {
deleteFilesInPath(p);
}
}
}
/**
* Method deleteFilesInPath.
*
* @param p Path
*/
private void deleteFilesInPath(Path p) {
for (String fileName : p.list()) {
File file = new File(fileName);
boolean deleted = file.delete();
if (!deleted) {
file.deleteOnExit();
}
}
}
/**
* Method verifyPathAdheresToDesign.
*
* @param d Design
* @param p Path
* @throws ClassFormatException if ClassParser fails
* @throws IOException if ClassParser fails
*/
private void verifyPathAdheresToDesign(Design d, Path p)
throws ClassFormatException, IOException {
for (String fileName : p.list()) {
File file = new File(fileName);
if (file.isDirectory()) {
FileSet set = new FileSet();
set.setDir(file);
set.setProject(task.getProject());
PatternSet.NameEntry entry1 = set.createInclude();
PatternSet.NameEntry entry2 = set.createInclude();
PatternSet.NameEntry entry3 = set.createInclude();
entry1.setName("**/*.class");
entry2.setName("**/*.jar");
entry3.setName("**/*.war");
DirectoryScanner scanner = set.getDirectoryScanner(task.getProject());
scanner.setBasedir(file);
String[] scannerFiles = scanner.getIncludedFiles();
for (String scannerFile : scannerFiles) {
verifyPartOfPath(scannerFile, new File(file, scannerFile), d);
}
} else {
verifyPartOfPath(fileName, file, d);
}
}
}
/**
* Method verifyPartOfPath.
*
* @param fileName String
* @param file File
* @param d Design
* @throws BuildException if a file that does not contain
* Java bytecode is supposed to be verified
* @throws ClassFormatException if ClassParser fails
* @throws IOException if ClassParser fails
*/
private void verifyPartOfPath(String fileName, File file, Design d)
throws BuildException, ClassFormatException, IOException {
if (fileName.endsWith(".jar") || fileName.endsWith(".war")) {
JarFile jarFile = new JarFile(file);
verifyJarAdheresToDesign(d, jarFile, file);
} else if (fileName.endsWith(".class")) {
verifyClassAdheresToDesign(d, file);
} else {
throw new BuildException("Only directories, jars, wars, and class files can be supplied to verify design, not file="
+ file.getAbsolutePath());
}
}
/**
* Method verifyClassAdheresToDesign.
*
* @param d Design
* @param classFile File
* @throws ClassFormatException if ClassParser fails
* @throws IOException if ClassParser fails
*/
private void verifyClassAdheresToDesign(Design d, File classFile)
throws ClassFormatException, IOException {
FileInputStream fis = null;
try {
fis = new FileInputStream(classFile);
verifyClassAdheresToDesign(d, fis, classFile.getAbsolutePath(), classFile);
} finally {
try {
if (fis != null) {
fis.close();
}
} catch (IOException e) {
//doh!!
}
}
}
/**
* Method verifyJarAdheresToDesign.
*
* @param d Design
* @param jarFile JarFile
* @param original File
* @throws ClassFormatException if ClassParser fails
* @throws IOException if ClassParser fails
*/
private void verifyJarAdheresToDesign(Design d, JarFile jarFile, File original)
throws ClassFormatException, IOException {
try {
Enumeration<JarEntry> en = jarFile.entries();
while (en.hasMoreElements()) {
ZipEntry entry = en.nextElement();
InputStream in = null;
if (entry.getName().endsWith(".class")) {
in = jarFile.getInputStream(entry);
try {
in = jarFile.getInputStream(entry);
verifyClassAdheresToDesign(d, in, entry.getName(), original);
} finally {
try {
if (in != null) {
in.close();
}
} catch (IOException e) {
// doh!!!
}
}
}
}
} finally {
try {
jarFile.close();
} catch (IOException e) {
//doh!!!
}
}
}
/**
* Method verifyClassAdheresToDesign.
*
* @param d Design
* @param in InputStream
* @param name String
* @param originalClassOrJarFile File
* @throws ClassFormatException if ClassParser fails
* @throws IOException if ClassParser fails
*/
private void verifyClassAdheresToDesign(Design d, InputStream in, String name, File originalClassOrJarFile)
throws ClassFormatException, IOException {
try {
verifiedAtLeastOne = true;
ClassParser parser = new ClassParser(in, name);
JavaClass javaClass = parser.parse();
String className = javaClass.getClassName();
if (!d.needEvalCurrentClass(className)) {
return;
}
ConstantPool pool = javaClass.getConstantPool();
processConstantPool(pool);
VisitorImpl visitor = new VisitorImpl(pool, this, d, task.getLocation());
DescendingVisitor desc = new DescendingVisitor(javaClass, visitor);
desc.visit();
} catch (BuildException e) {
log(Design.getWrapperMsg(originalClassOrJarFile, e.getMessage()), Project.MSG_ERR);
designErrors.add(e);
}
}
/**
* Method processConstantPool.
*
* @param pool ConstantPool
*/
private void processConstantPool(ConstantPool pool) {
Constant[] constants = pool.getConstantPool();
if (constants == null) {
log(" constants=null", Project.MSG_VERBOSE);
return;
}
log(" constants len=" + constants.length, Project.MSG_VERBOSE);
for (int i = 0; i < constants.length; i++) {
processConstant(pool, constants[i], i);
}
}
/**
 * Inspects a single constant-pool entry and, when it names a class, runs the
 * design check against that class name.
 *
 * @param pool the constant pool the entry belongs to, used to resolve the name index
 * @param c    the entry to inspect; slot 0 of a class file's pool is always null
 * @param i    the entry's index, used only for logging
 */
private void processConstant(ConstantPool pool, Constant c, int i) {
    if (c == null) {
        // The class-file format reserves constant-pool slot 0, so entry 0 is always null.
        return;
    }
    log("  const[" + i + "]=" + pool.constantToString(c) + " inst=" + c.getClass().getName(),
        Project.MSG_DEBUG);
    // Reverse engineered from ConstantPool.constantToString: a CONSTANT_Class entry
    // holds an index into the pool pointing at the UTF-8 entry with the class name.
    if (c.getTag() == Constants.CONSTANT_Class) {
        int nameIndex = ((ConstantClass) c).getNameIndex();
        Constant utf8 = pool.getConstant(nameIndex, Constants.CONSTANT_Utf8);
        String className = Utility.compactClassName(((ConstantUtf8) utf8).getBytes(), false);
        log("  classNamePre=" + className, Project.MSG_DEBUG);
        className = getRidOfArray(className);
        // Array entries over primitive types reduce to a one-letter descriptor
        // (e.g. "[I" -> "I"); those are not real classes and must be skipped.
        // Guard against an empty name before charAt(0) (defensive; previously
        // "charAt(0) + \"\"" would have thrown on an empty string).
        if (className.isEmpty() || primitives.contains(String.valueOf(className.charAt(0)))) {
            return;
        }
        log("  className=" + className, Project.MSG_VERBOSE);
        design.checkClass(className);
    }
}
/**
 * Strips any leading JVM array markers ('[') from a class-file type name,
 * e.g. {@code "[[Lcom/foo/Bar;"} becomes {@code "Lcom/foo/Bar;"}.
 *
 * @param className name possibly prefixed with one or more '[' characters
 * @return the name with every leading '[' removed
 */
private static String getRidOfArray(String className) {
    int start = 0;
    while (start < className.length() && className.charAt(start) == '[') {
        start++;
    }
    return className.substring(start);
}
/**
 * Extracts the package portion of a fully qualified class name.
 *
 * @param className fully qualified class name, e.g. {@code "java.lang.String"}
 * @return everything before the last dot, or {@code Package.DEFAULT} when the
 *         name contains no dot (default-package classes)
 */
public static String getPackageName(String className) {
    int lastDot = className.lastIndexOf('.');
    // TODO test the else scenario here (it is a corner case)...
    return lastDot > 0 ? className.substring(0, lastDot) : Package.DEFAULT;
}
/**
 * Forwards a message to the owning Ant task's logger so output honours the
 * build's verbosity settings.
 *
 * @param msg   text to log
 * @param level one of the {@code Project.MSG_*} levels
 * @see net.sf.antcontrib.design.Log#log(String, int)
 */
public void log(String msg, int level) {
    task.log(msg, level);
}
}
| |
/*
* Copyright 2013 Abid Hasan Mujtaba
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.abid_mujtaba.fetchheaders;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.speech.tts.TextToSpeech;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.support.v7.app.ActionBarActivity;
import android.util.SparseArray;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.abid_mujtaba.fetchheaders.fragments.AccountFragment;
import com.abid_mujtaba.fetchheaders.interfaces.ToggleMenu;
import com.abid_mujtaba.fetchheaders.misc.ThreadPool;
import com.abid_mujtaba.fetchheaders.models.Account;
import com.abid_mujtaba.fetchheaders.models.Email;
import java.util.ArrayList;
import java.util.Locale;
public class MainActivity extends ActionBarActivity implements ToggleMenu, TextToSpeech.OnInitListener
{
private LinearLayout scrollList;
private Menu mMenu; // A handle to the Menu item
private ArrayList<AccountFragment> mFragments = new ArrayList<AccountFragment>(); // Stores all fragments added to this activity
private Handler mHandler = new Handler(); // Handler used to carry out UI actions from background threads
private boolean fShowSeen = false; // Flag which control whether seen emails should be displayed or not
private boolean fDisableMenu = false; // A flag that indicates whether the menu should be disabled or not. We want the menu disabled when we are performing certain tasks such as fetching emails
private String BUNDLE_FLAG_SHOW_SEEN = "BUNDLE_FLAG_SHOW_SEEN"; // Used as a key for the showSeen flag stored in the Bundle that saves state information when the activity is restarted (possibly because of screen rotation)
private TextToSpeech mTTS;
@Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
if (savedInstanceState != null) // If the passed in state information bundle is non-empty we expect it to contain the saved value of fShowSeen. We also pass in a default value.
{
fShowSeen = savedInstanceState.getBoolean(BUNDLE_FLAG_SHOW_SEEN, false);
}
scrollList = (LinearLayout) findViewById(R.id.scrollList);
if (mTTS == null) // If onCreate is called multiple times we do NOT want to create multiple TextToSpeech objects
{
mTTS = new TextToSpeech(this, this);
}
if (Account.numberOfAccounts() > 0) // Accounts have been specified
{
TextView tvEmpty = (TextView) findViewById(R.id.txtNoAccounts); // We start by removing the No Accounts view since accounts are present
scrollList.removeView(tvEmpty);
FragmentManager fM = getSupportFragmentManager();
FragmentTransaction fT = fM.beginTransaction();
for (int ii = 0; ii < Account.numberOfAccounts(); ii++)
{
String tag = "TAG_" + ii; // This is the tag we will use to get a handle on the fragment in the FragmentManager
AccountFragment aF = (AccountFragment) fM.findFragmentByTag(tag); // We attempt to access the fragment via the specified tag
if (aF == null) // This indicates that the Fragment does not exist yet so we create it. It has setRetainInstance(true) so it persists across configuration changes.
{
aF = AccountFragment.newInstance(ii);
fT.add(R.id.scrollList, aF, tag); // Note: The addition to the scrollList only happens when aF == null, which happens when the persistent fragment has not been created yet
} // Since Views retain state across config changes the scrollList remembers that it has fragments added to it
mFragments.add(aF);
}
fT.commit();
}
}
@Override
protected void onDestroy()
{
if (mTTS != null) // mTTS needs to be properly shutdown otherwise the app will complain about a leaked service
{
mTTS.shutdown();
}
super.onDestroy();
}
@Override
protected void onStop()
{
if (mTTS != null)
{
mTTS.stop(); // We stop the TTS engine when the activity stops. This stops ALL current and queued TTS activity.
}
super.onStop();
}
@Override
public boolean onCreateOptionsMenu(Menu menu)
{
MenuInflater menuInflater = getMenuInflater();
menuInflater.inflate(R.menu.main_menu, menu);
mMenu = menu; // Store a handle to the Menu item.
if (fShowSeen) { mMenu.findItem(R.id.menu_show_seen).setTitle("Hide Seen"); } // Set Menu Item Title based on fShowSeen.
else { mMenu.findItem(R.id.menu_show_seen).setTitle("Show Seen"); } // Since this method is called every time the activity is recreated (including when the screen is rotated we check fShowSeen and then set the menu item title
return true;
}
// @Override
// public boolean onPrepareOptionsMenu(Menu menu)
// {
// super.onPrepareOptionsMenu(menu); // We make sure to carry out the actions we are overriding to carry out the requisite functionality
//
// if (fDisableMenu) { Toast.makeText(this, "Waiting ...", Toast.LENGTH_SHORT).show(); } // The menu is disabled and we apprise the user of this.
//
// return ! fDisableMenu; // Basically decide whether pressing the menu actually shows any items based on the fDisableMenu flag.
// }
private int toggleMenuCount = 0; // This integer is used to count the number of fragments who have asked to disable the menu. When this count is zero the menu is enabled.
public void enableMenu()
{
if (--toggleMenuCount == 0) // We predecrement the count of requests to disable the menu. If the count falls to zero we change the flag.
{
fDisableMenu = false;
}
}
public void disableMenu()
{
fDisableMenu = true; // Set the Disable Menu flag
toggleMenuCount++; // Increment the count to show that one more entity has asked for the menu to be disabled
}
@Override
public boolean onOptionsItemSelected(MenuItem item)
{
switch (item.getItemId())
{
case R.id.menu_refresh:
refresh();
return true;
case R.id.menu_delete:
remove_deleted_emails();
return true;
case R.id.menu_accounts:
startActivity(new Intent("com.abid_mujtaba.fetchheaders.AccountsActivity"));
return true;
case R.id.menu_show_seen:
fShowSeen = ! fShowSeen;
if (fShowSeen) { mMenu.findItem(R.id.menu_show_seen).setTitle("Hide Seen"); } // Toggle Menu Item Title
else { mMenu.findItem(R.id.menu_show_seen).setTitle("Show Seen"); }
for (AccountFragment fragment: mFragments)
{
fragment.showSeen(fShowSeen);
}
return true;
case R.id.menu_speak:
speak_emails();
default:
return super.onOptionsItemSelected(item);
}
}
@Override
protected void onSaveInstanceState(Bundle outState)
{
super.onSaveInstanceState(outState);
outState.putBoolean(BUNDLE_FLAG_SHOW_SEEN, fShowSeen);
}
public void refresh() // Reloads the activity. Emails marked for deletion are NOT deleted.
{
Intent i = getIntent(); // We restart the MainActivity with the same intent it was started with. This causes emails to be fetched again. Emails deleted while issuing fragment.remove_emails_marked_for_deletion() will not appear.
finish();
startActivity(i);
}
public void remove_deleted_emails() // Method for removing emails marked for deletion from the UI
{
for (final AccountFragment fragment: mFragments)
{
Runnable delete = new Runnable() {
@Override
public void run()
{
fragment.remove_emails_marked_for_deletion(mHandler);
}
};
ThreadPool.executeTask(delete);
}
}
private void speak_emails()
{
for (int ii = 0; ii < Account.numberOfAccounts(); ii++)
{
Account account = Account.get(ii);
SparseArray<Email> emails = account.emails();
int num_emails = emails.size();
if (account.num_unseen_emails() > 0)
{
speak(account); // Speak Account name
for (int jj = 0; jj < num_emails; jj++)
{
Email email = emails.get(jj);
if (! email.seen())
{
speak(email);
}
}
}
}
}
@Override
public void onInit(int status)
{
if (status == TextToSpeech.SUCCESS)
{
int result = mTTS.setLanguage(Locale.US);
if (result == TextToSpeech.LANG_MISSING_DATA || result == TextToSpeech.LANG_NOT_SUPPORTED)
{
Resources.Loge("This Language is not supported. result: " + result, null);
Resources.Loge("If result is -1 simply install 'Speech Synthesis Data Installer' from the Google Play Store.", null);
}
else { return; }
}
mTTS.stop();
mTTS.shutdown();
mTTS = null;
Resources.Loge("Initialization failed", null);
}
private void speak(Email email) // Method for applying TextToSpeech to an email
{
if (mTTS != null) // mTTS is set to null if the initialization fails
{
// NOTE: Adding periods inside the string introduces delays in the Speech synthesized from the text
String msg = String.format("From %s. %s.", email.from(), email.subject());
mTTS.speak(msg, TextToSpeech.QUEUE_ADD, null);
}
}
private void speak(Account account) // Method for applying TextToSpeech to an account
{
if (mTTS != null)
{
String msg = String.format(".Account %s.", account.name());
mTTS.speak(msg, TextToSpeech.QUEUE_ADD, null);
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.util;
import org.apache.lucene.index.CheckIndex;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.ShardLock;
import org.elasticsearch.gateway.MetaDataStateFormat;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardPath;
import org.elasticsearch.index.shard.ShardStateMetaData;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.*;
/**
*/
/**
 * Upgrades shards spread across multiple data paths into a single target
 * {@link ShardPath}. File moves are performed copy-then-atomic-rename, so a crash
 * can at worst leave a file duplicated — never lost or half written.
 */
public class MultiDataPathUpgrader {

    private final NodeEnvironment nodeEnvironment;
    private final ESLogger logger = Loggers.getLogger(getClass());

    /**
     * Creates a new upgrader instance
     * @param nodeEnvironment the node env to operate on.
     */
    public MultiDataPathUpgrader(NodeEnvironment nodeEnvironment) {
        this.nodeEnvironment = nodeEnvironment;
    }

    /**
     * Upgrades the given shard Id from multiple shard paths into the given target path.
     *
     * @param shard      the shard to upgrade
     * @param targetPath the path that will hold the consolidated shard; must be one
     *                   of the shard's configured data paths
     * @throws IOException if any file operation fails
     * @see #pickShardPath(org.elasticsearch.index.shard.ShardId)
     */
    public void upgrade(ShardId shard, ShardPath targetPath) throws IOException {
        final Path[] paths = nodeEnvironment.availableShardPaths(shard); // custom data path doesn't need upgrading
        if (isTargetPathConfigured(paths, targetPath) == false) {
            throw new IllegalArgumentException("shard path must be one of the shards data paths");
        }
        assert needsUpgrading(shard) : "Should not upgrade a path that needs no upgrading";
        logger.info("{} upgrading multi data dir to {}", shard, targetPath.getDataPath());
        // Write the latest shard state into the target first so the shard is findable
        // there even if the copy below is interrupted.
        final ShardStateMetaData loaded = ShardStateMetaData.FORMAT.loadLatestState(logger, paths);
        if (loaded == null) {
            throw new IllegalStateException(shard + " no shard state found in any of: " + Arrays.toString(paths) + " please check and remove them if possible");
        }
        logger.info("{} loaded shard state {}", shard, loaded);

        ShardStateMetaData.FORMAT.write(loaded, loaded.version, targetPath.getShardStatePath());
        Files.createDirectories(targetPath.resolveIndex());
        try (SimpleFSDirectory directory = new SimpleFSDirectory(targetPath.resolveIndex())) {
            // Hold the Lucene write lock while moving index files so no writer can open concurrently.
            try (final Lock lock = directory.obtainLock(IndexWriter.WRITE_LOCK_NAME)) {
                upgradeFiles(shard, targetPath, targetPath.resolveIndex(), ShardPath.INDEX_FOLDER_NAME, paths);
            } catch (LockObtainFailedException ex) {
                throw new IllegalStateException("Can't obtain lock on " + targetPath.resolveIndex(), ex);
            }
        }

        upgradeFiles(shard, targetPath, targetPath.resolveTranslog(), ShardPath.TRANSLOG_FOLDER_NAME, paths);

        logger.info("{} wipe upgraded directories", shard);
        for (Path path : paths) {
            if (path.equals(targetPath.getShardStatePath()) == false) {
                logger.info("{} wipe shard directories: [{}]", shard, path);
                IOUtils.rm(path);
            }
        }

        // Sanity checks: the consolidated shard must actually contain files.
        if (FileSystemUtils.files(targetPath.resolveIndex()).length == 0) {
            throw new IllegalStateException("index folder [" + targetPath.resolveIndex() + "] is empty");
        }

        if (FileSystemUtils.files(targetPath.resolveTranslog()).length == 0) {
            throw new IllegalStateException("translog folder [" + targetPath.resolveTranslog() + "] is empty");
        }
    }

    /**
     * Runs check-index on the target shard and throws an exception if it failed
     */
    public void checkIndex(ShardPath targetPath) throws IOException {
        BytesStreamOutput os = new BytesStreamOutput();
        PrintStream out = new PrintStream(os, false, StandardCharsets.UTF_8.name());
        try (Directory directory = new SimpleFSDirectory(targetPath.resolveIndex());
             final CheckIndex checkIndex = new CheckIndex(directory)) {
            checkIndex.setInfoStream(out);
            CheckIndex.Status status = checkIndex.checkIndex();
            out.flush();
            if (!status.clean) {
                logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8));
                throw new IllegalStateException("index check failure");
            }
        }
    }

    /**
     * Returns true iff the given shard needs upgrading, i.e. more than one of its
     * configured data paths contains shard state.
     */
    public boolean needsUpgrading(ShardId shard) {
        final Path[] paths = nodeEnvironment.availableShardPaths(shard);
        // custom data path doesn't need upgrading neither single path envs
        if (paths.length > 1) {
            int numPathsExist = 0;
            for (Path path : paths) {
                if (Files.exists(path.resolve(MetaDataStateFormat.STATE_DIR_NAME))) {
                    numPathsExist++;
                    if (numPathsExist > 1) {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    /**
     * Picks a target ShardPath to allocate and upgrade the given shard to. It picks the target based on a simple
     * heuristic:
     * <ul>
     * <li>if the smallest datapath has 2x more space available that the shards total size the datapath with the most bytes for that shard is picked to minimize the amount of bytes to copy</li>
     * <li>otherwise the largest available datapath is used as the target no matter how big of a slice of the shard it already holds.</li>
     * </ul>
     */
    public ShardPath pickShardPath(ShardId shard) throws IOException {
        if (needsUpgrading(shard) == false) {
            throw new IllegalStateException("Shard doesn't need upgrading");
        }
        final NodeEnvironment.NodePath[] paths = nodeEnvironment.nodePaths();

        // if we need upgrading make sure we have all paths.
        for (NodeEnvironment.NodePath path : paths) {
            Files.createDirectories(path.resolve(shard));
        }
        final ShardFileInfo[] shardFileInfo = getShardFileInfo(shard, paths);
        long totalBytesUsedByShard = 0;
        long leastUsableSpace = Long.MAX_VALUE;
        long mostUsableSpace = Long.MIN_VALUE;
        assert shardFileInfo.length == nodeEnvironment.availableShardPaths(shard).length;
        // Usable space is counted as free space PLUS the bytes the shard already occupies
        // on that path, since moving the shard onto the path reclaims nothing it holds.
        for (ShardFileInfo info : shardFileInfo) {
            totalBytesUsedByShard += info.spaceUsedByShard;
            leastUsableSpace = Math.min(leastUsableSpace, info.usableSpace + info.spaceUsedByShard);
            mostUsableSpace = Math.max(mostUsableSpace, info.usableSpace + info.spaceUsedByShard);
        }

        if (mostUsableSpace < totalBytesUsedByShard) {
            throw new IllegalStateException("Can't upgrade path available space: " + new ByteSizeValue(mostUsableSpace) + " required space: " + new ByteSizeValue(totalBytesUsedByShard));
        }
        ShardFileInfo target = shardFileInfo[0];
        if (leastUsableSpace >= (2 * totalBytesUsedByShard)) {
            // Plenty of room everywhere: minimize copying by picking the path that
            // already holds the largest slice of the shard.
            for (ShardFileInfo info : shardFileInfo) {
                if (info.spaceUsedByShard > target.spaceUsedByShard) {
                    target = info;
                }
            }
        } else {
            // Space is tight: pick the path with the most usable space.
            for (ShardFileInfo info : shardFileInfo) {
                if (info.usableSpace > target.usableSpace) {
                    target = info;
                }
            }
        }
        return new ShardPath(false, target.path, target.path, IndexMetaData.INDEX_UUID_NA_VALUE /* we don't know */, shard);
    }

    private ShardFileInfo[] getShardFileInfo(ShardId shard, NodeEnvironment.NodePath[] paths) throws IOException {
        final ShardFileInfo[] info = new ShardFileInfo[paths.length];
        for (int i = 0; i < info.length; i++) {
            Path path = paths[i].resolve(shard);
            final long usableSpace = getUsabelSpace(paths[i]);
            info[i] = new ShardFileInfo(path, usableSpace, getSpaceUsedByShard(path));
        }
        return info;
    }

    /**
     * Sums the sizes of all regular files under {@code path}. Unreadable files are
     * skipped (best effort) rather than failing the walk.
     */
    protected long getSpaceUsedByShard(Path path) throws IOException {
        final long[] spaceUsedByShard = new long[] {0};
        if (Files.exists(path)) {
            Files.walkFileTree(path, new SimpleFileVisitor<Path>() {
                @Override
                public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                    if (attrs.isRegularFile()) {
                        spaceUsedByShard[0] += attrs.size();
                    }
                    return FileVisitResult.CONTINUE;
                }

                @Override
                public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
                    // best effort: a file we can't stat simply doesn't count towards the total
                    return FileVisitResult.CONTINUE;
                }
            });
        }
        return spaceUsedByShard[0];
    }

    // NOTE: method name keeps the historical "Usabel" typo because it is protected
    // and may be overridden (e.g. by tests); renaming would break subclasses.
    protected long getUsabelSpace(NodeEnvironment.NodePath path) throws IOException {
        FileStore fileStore = path.fileStore;
        return fileStore.getUsableSpace();
    }

    /** Per-path snapshot of free space and how many bytes of the shard the path holds. */
    static class ShardFileInfo {
        final Path path;
        final long usableSpace;
        final long spaceUsedByShard;

        ShardFileInfo(Path path, long usableSpace, long spaceUsedByShard) {
            this.path = path;
            this.usableSpace = usableSpace;
            this.spaceUsedByShard = spaceUsedByShard;
        }
    }

    /**
     * Moves the {@code folderName} sub-folder of every non-target path into
     * {@code targetDir}, then fsyncs the moved files and the directory.
     */
    private void upgradeFiles(ShardId shard, ShardPath targetPath, final Path targetDir, String folderName, Path[] paths) throws IOException {
        List<Path> movedFiles = new ArrayList<>();
        for (Path path : paths) {
            if (path.equals(targetPath.getDataPath()) == false) {
                final Path sourceDir = path.resolve(folderName);
                if (Files.exists(sourceDir)) {
                    logger.info("{} upgrading [{}] from [{}] to [{}]", shard, folderName, sourceDir, targetDir);
                    try (DirectoryStream<Path> stream = Files.newDirectoryStream(sourceDir)) {
                        Files.createDirectories(targetDir);
                        for (Path file : stream) {
                            if (IndexWriter.WRITE_LOCK_NAME.equals(file.getFileName().toString()) || Files.isDirectory(file)) {
                                continue; // skip write.lock
                            }
                            logger.info("{} move file [{}] size: [{}]", shard, file.getFileName(), Files.size(file));
                            final Path targetFile = targetDir.resolve(file.getFileName());
                            /* We are pessimistic and do a copy first to the other path and then and atomic move to rename it such that
                               in the worst case the file exists twice but is never lost or half written.*/
                            final Path targetTempFile = Files.createTempFile(targetDir, "upgrade_", "_" + file.getFileName().toString());
                            Files.copy(file, targetTempFile, StandardCopyOption.COPY_ATTRIBUTES, StandardCopyOption.REPLACE_EXISTING);
                            Files.move(targetTempFile, targetFile, StandardCopyOption.ATOMIC_MOVE); // we are on the same FS - this must work otherwise all bets are off
                            Files.delete(file);
                            movedFiles.add(targetFile);
                        }
                    }
                }
            }
        }
        if (movedFiles.isEmpty() == false) {
            // fsync later it might be on disk already
            logger.info("{} fsync files", shard);
            for (Path moved : movedFiles) {
                logger.info("{} syncing [{}]", shard, moved.getFileName());
                IOUtils.fsync(moved, false);
            }
            logger.info("{} syncing directory [{}]", shard, targetDir);
            IOUtils.fsync(targetDir, true);
        }
    }

    /**
     * Returns <code>true</code> iff the target path is one of the given paths.
     */
    private boolean isTargetPathConfigured(final Path[] paths, ShardPath targetPath) {
        for (Path path : paths) {
            if (path.equals(targetPath.getDataPath())) {
                return true;
            }
        }
        return false;
    }

    /**
     * Runs an upgrade on all shards located under the given node environment if there is more than 1 data.path configured
     * otherwise this method will return immediately.
     */
    public static void upgradeMultiDataPath(NodeEnvironment nodeEnv, ESLogger logger) throws IOException {
        if (nodeEnv.nodeDataPaths().length > 1) {
            final MultiDataPathUpgrader upgrader = new MultiDataPathUpgrader(nodeEnv);
            final Set<String> allIndices = nodeEnv.findAllIndices();

            for (String index : allIndices) {
                for (ShardId shardId : findAllShardIds(nodeEnv.indexPaths(new Index(index)))) {
                    try (ShardLock lock = nodeEnv.shardLock(shardId, 0)) {
                        if (upgrader.needsUpgrading(shardId)) {
                            final ShardPath shardPath = upgrader.pickShardPath(shardId);
                            upgrader.upgrade(shardId, shardPath);
                            // we have to check if the index path exists since we might
                            // have only upgraded the shard state that is written under /indexname/shardid/_state
                            // in the case we upgraded a dedicated index directory index
                            if (Files.exists(shardPath.resolveIndex())) {
                                upgrader.checkIndex(shardPath);
                            }
                        } else {
                            // FIX: the "{}" placeholder previously had no argument, so the
                            // shard id never appeared in the log line.
                            logger.debug("{} no upgrade needed - already upgraded", shardId);
                        }
                    }
                }
            }
        }
    }

    private static Set<ShardId> findAllShardIds(Path... locations) throws IOException {
        final Set<ShardId> shardIds = new HashSet<>();
        for (final Path location : locations) {
            if (Files.isDirectory(location)) {
                shardIds.addAll(findAllShardsForIndex(location));
            }
        }
        return shardIds;
    }

    private static Set<ShardId> findAllShardsForIndex(Path indexPath) throws IOException {
        Set<ShardId> shardIds = new HashSet<>();
        if (Files.isDirectory(indexPath)) {
            try (DirectoryStream<Path> stream = Files.newDirectoryStream(indexPath)) {
                String currentIndex = indexPath.getFileName().toString();
                for (Path shardPath : stream) {
                    String fileName = shardPath.getFileName().toString();
                    // Only all-digit directory names are shard directories.
                    if (Files.isDirectory(shardPath) && fileName.chars().allMatch(Character::isDigit)) {
                        int shardId = Integer.parseInt(fileName);
                        ShardId id = new ShardId(currentIndex, shardId);
                        shardIds.add(id);
                    }
                }
            }
        }
        return shardIds;
    }
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2015 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.jsoninput;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.commons.vfs2.FileObject;
import org.pentaho.di.core.CheckResult;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.annotations.Step;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.fileinput.FileInputList;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaFactory;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.resource.ResourceDefinition;
import org.pentaho.di.resource.ResourceNamingInterface;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;
/**
* Store run-time data on the JsonInput step.
*/
@Step( id = "JsonInput", image = "JSI.svg", i18nPackageName = "org.pentaho.di.trans.steps.jsoninput",
name = "JsonInput.name", description = "JsonInput.description", categoryDescription = "JsonInput.category" )
public class JsonInputMeta extends BaseStepMeta implements StepMetaInterface {
  private static Class<?> PKG = JsonInputMeta.class; // for i18n purposes, needed by Translator2!!

  private static final String YES = "Y";

  /** Localised "No"/"Yes" labels shown in UI combos for the required/sub-folder flags. */
  public static final String[] RequiredFilesDesc = new String[] { BaseMessages.getString( PKG, "System.Combo.No" ),
    BaseMessages.getString( PKG, "System.Combo.Yes" ) };
  /** Storage codes matching {@link #RequiredFilesDesc} position for position. */
  public static final String[] RequiredFilesCode = new String[] { "N", "Y" };

  /** Array of filenames */
  private String[] fileName;

  /** Wildcard or filemask (regular expression) */
  private String[] fileMask;

  /** Array of boolean values as string ("Y"/"N"), indicating if a file is required. */
  private String[] fileRequired;

  /** Wildcard or filemask to exclude (regular expression) */
  private String[] excludeFileMask;

  /** Flag indicating that we should include the filename in the output */
  private boolean includeFilename;

  /** The name of the field in the output containing the filename */
  private String filenameField;

  /** Flag indicating that a row number field should be included in the output */
  private boolean includeRowNumber;

  /** The name of the field in the output containing the row number */
  private String rowNumberField;

  /** The maximum number of lines to read */
  private long rowLimit;

  /** The fields to import... */
  private JsonInputField[] inputFields;

  /** Name of the incoming field holding the value to parse when reading from fields (presumably the JSON source — confirm against the JsonInput step). */
  private String valueField;

  /** True when the source comes from an incoming field instead of from files. */
  private boolean inFields;

  /** Is a File */
  private boolean IsAFile;

  /** Flag: add result filename **/
  private boolean addResultFile;

  /** Flag : do we ignore empty files */
  private boolean IsIgnoreEmptyFile;

  /** Array of boolean values as string ("Y"/"N"), indicating if we need to fetch sub folders. */
  private String[] includeSubFolders;

  /** Flag : do not fail if no file */
  private boolean doNotFailIfNoFile;

  // NOTE(review): presumably "do not fail when a configured path is missing" — confirm against the step logic.
  private boolean ignoreMissingPath;

  /** Flag : read url as source */
  private boolean readurl;

  /** Additional fields: names of optional output fields carrying file metadata (null/empty when disabled). **/
  private String shortFileFieldName;
  private String pathFieldName;
  private String hiddenFieldName;
  private String lastModificationTimeFieldName;
  private String uriNameFieldName;
  private String rootUriNameFieldName;
  private String extensionFieldName;
  private String sizeFieldName;
  /** Default constructor; delegates to BaseStepMeta for base initialisation. */
  public JsonInputMeta() {
    super(); // allocate BaseStepMeta
  }
  /**
   * @return the name of the optional output field carrying the short file name (the shortFileFieldName).
   */
  public String getShortFileNameField() {
    return shortFileFieldName;
  }

  /**
   * @param field
   *          The shortFileFieldName to set.
   */
  public void setShortFileNameField( String field ) {
    shortFileFieldName = field;
  }

  /**
   * @return the name of the optional output field carrying the file's path (the pathFieldName).
   */
  public String getPathField() {
    return pathFieldName;
  }

  /**
   * @param field
   *          The pathFieldName to set.
   */
  public void setPathField( String field ) {
    this.pathFieldName = field;
  }

  /**
   * Despite the is* prefix this returns a field NAME, not a boolean: the output
   * field that will carry the file's "hidden" flag.
   *
   * @return Returns the hiddenFieldName.
   */
  public String isHiddenField() {
    return hiddenFieldName;
  }

  /**
   * @param field
   *          The hiddenFieldName to set.
   */
  public void setIsHiddenField( String field ) {
    hiddenFieldName = field;
  }
  /**
   * @return Returns the lastModificationTimeFieldName.
   */
  public String getLastModificationDateField() {
    return lastModificationTimeFieldName;
  }

  /**
   * @param field
   *          The lastModificationTimeFieldName to set.
   */
  public void setLastModificationDateField( String field ) {
    lastModificationTimeFieldName = field;
  }

  /**
   * @return Returns the uriNameFieldName.
   */
  public String getUriField() {
    return uriNameFieldName;
  }

  /**
   * @param field
   *          The uriNameFieldName to set.
   */
  public void setUriField( String field ) {
    uriNameFieldName = field;
  }

  /**
   * @return Returns the rootUriNameFieldName.
   */
  public String getRootUriField() {
    return rootUriNameFieldName;
  }

  /**
   * @param field
   *          The rootUriNameFieldName to set.
   */
  public void setRootUriField( String field ) {
    rootUriNameFieldName = field;
  }

  /**
   * @return Returns the extensionFieldName.
   */
  public String getExtensionField() {
    return extensionFieldName;
  }

  /**
   * @param field
   *          The extensionFieldName to set.
   */
  public void setExtensionField( String field ) {
    extensionFieldName = field;
  }

  /**
   * @return Returns the sizeFieldName.
   */
  public String getSizeField() {
    return sizeFieldName;
  }

  /**
   * @param field
   *          The sizeFieldName to set.
   */
  public void setSizeField( String field ) {
    sizeFieldName = field;
  }
  /**
   * @return the "add result filename" flag: whether processed files are added to the transformation's result files.
   */
  public boolean addResultFile() {
    return addResultFile;
  }

  /**
   * @return the readurl flag: whether the file names are treated as URLs to read from.
   */
  public boolean isReadUrl() {
    return readurl;
  }

  /**
   * @param readurl
   *          the readurl flag to set
   */
  public void setReadUrl( boolean readurl ) {
    this.readurl = readurl;
  }

  /** @param addResultFile the "add result filename" flag to set. */
  public void setAddResultFile( boolean addResultFile ) {
    this.addResultFile = addResultFile;
  }

  /**
   * @return Returns the input fields.
   */
  public JsonInputField[] getInputFields() {
    return inputFields;
  }

  /**
   * @param inputFields
   *          The input fields to set.
   */
  public void setInputFields( JsonInputField[] inputFields ) {
    this.inputFields = inputFields;
  }
/**
* @return Returns the excludeFileMask.
*/
// NOTE(review): method name is misspelled ("Exlude") but must be kept as-is for API compatibility.
public String[] getExludeFileMask() {
return excludeFileMask;
}
/**
* @param excludeFileMask
* The excludeFileMask to set.
*/
public void setExcludeFileMask( String[] excludeFileMask ) {
this.excludeFileMask = excludeFileMask;
}
/**
* @return the name of the incoming field that carries the JSON value (used when {@link #isInFields()} is true)
*/
public String getFieldValue() {
return valueField;
}
/**
* @param value
* the name of the incoming field that carries the JSON value
*/
public void setFieldValue( String value ) {
this.valueField = value;
}
/**
* @return true when the JSON source is read from an incoming field rather than from files
*/
public boolean isInFields() {
return inFields;
}
/**
* @param inFields
* set the inFields.
*/
public void setInFields( boolean inFields ) {
this.inFields = inFields;
}
/**
* @return Returns the fileMask.
*/
public String[] getFileMask() {
return fileMask;
}
/**
* @param fileMask
* The fileMask to set.
*/
public void setFileMask( String[] fileMask ) {
this.fileMask = fileMask;
}
/**
 * @return the per-file "file required" codes (Y/N), parallel to {@link #getFileName()}
 */
public String[] getFileRequired() {
  return fileRequired;
}

/**
 * Sets the per-file "file required" flags, converting each localized description to its Y/N code.
 * The array is reallocated to the input's length; previously this wrote into the existing array
 * and threw ArrayIndexOutOfBoundsException when the input was longer than the allocated size.
 *
 * @param fileRequiredin descriptions or codes, one per file entry
 */
public void setFileRequired( String[] fileRequiredin ) {
  this.fileRequired = new String[fileRequiredin.length];
  for ( int i = 0; i < fileRequiredin.length; i++ ) {
    this.fileRequired[i] = getRequiredFilesCode( fileRequiredin[i] );
  }
}

/**
 * Sets the per-file "include subfolders" flags, converting each localized description to its Y/N
 * code. The array is reallocated to the input's length (same out-of-bounds fix as
 * {@link #setFileRequired(String[])}).
 *
 * @param includeSubFoldersin descriptions or codes, one per file entry
 */
public void setIncludeSubFolders( String[] includeSubFoldersin ) {
  this.includeSubFolders = new String[includeSubFoldersin.length];
  for ( int i = 0; i < includeSubFoldersin.length; i++ ) {
    this.includeSubFolders[i] = getRequiredFilesCode( includeSubFoldersin[i] );
  }
}
/**
* @return Returns the fileName.
*/
public String[] getFileName() {
return fileName;
}
/**
* @param fileName
* The fileName to set.
*/
public void setFileName( String[] fileName ) {
this.fileName = fileName;
}
/**
* @return Returns the filenameField.
*/
public String getFilenameField() {
return filenameField;
}
/**
* @param filenameField
* The filenameField to set.
*/
public void setFilenameField( String filenameField ) {
this.filenameField = filenameField;
}
/**
* @return Returns the includeFilename.
*/
public boolean includeFilename() {
return includeFilename;
}
/**
* @param includeFilename
* The includeFilename to set.
*/
public void setIncludeFilename( boolean includeFilename ) {
this.includeFilename = includeFilename;
}
/**
* @return Returns the includeRowNumber.
*/
public boolean includeRowNumber() {
return includeRowNumber;
}
/**
* @param includeRowNumber
* The includeRowNumber to set.
*/
public void setIncludeRowNumber( boolean includeRowNumber ) {
this.includeRowNumber = includeRowNumber;
}
/**
* @return Returns the rowLimit.
*/
public long getRowLimit() {
return rowLimit;
}
/**
* @param rowLimit
* The rowLimit to set.
*/
public void setRowLimit( long rowLimit ) {
this.rowLimit = rowLimit;
}
/**
* @return the IsIgnoreEmptyFile flag
*/
public boolean isIgnoreEmptyFile() {
return IsIgnoreEmptyFile;
}
/**
* @param IsIgnoreEmptyFile
* the IsIgnoreEmptyFile to set
*/
public void setIgnoreEmptyFile( boolean IsIgnoreEmptyFile ) {
this.IsIgnoreEmptyFile = IsIgnoreEmptyFile;
}
/**
* @return the doNotFailIfNoFile flag
*/
public boolean isdoNotFailIfNoFile() {
return doNotFailIfNoFile;
}
/**
* @param doNotFailIfNoFile
* the doNotFailIfNoFile to set
*/
public void setdoNotFailIfNoFile( boolean doNotFailIfNoFile ) {
this.doNotFailIfNoFile = doNotFailIfNoFile;
}
/**
* @return the ignoreMissingPath flag
*/
public boolean isIgnoreMissingPath() {
return ignoreMissingPath;
}
/**
* @param ignoreMissingPath
* the ignoreMissingPath to set
*/
public void setIgnoreMissingPath( boolean ignoreMissingPath ) {
this.ignoreMissingPath = ignoreMissingPath;
}
/**
* @return Returns the rowNumberField.
*/
public String getRowNumberField() {
return rowNumberField;
}
/**
* @param rowNumberField
* The rowNumberField to set.
*/
public void setRowNumberField( String rowNumberField ) {
this.rowNumberField = rowNumberField;
}
/**
* @return the IsAFile flag
*/
public boolean getIsAFile() {
return IsAFile;
}
/**
* @param IsAFile
* the IsAFile flag to set
*/
public void setIsAFile( boolean IsAFile ) {
this.IsAFile = IsAFile;
}
/**
* @return the per-file "include subfolders" codes (Y/N), parallel to {@link #getFileName()}
*/
public String[] getIncludeSubFolders() {
return includeSubFolders;
}
/**
* Loads this step's configuration from the step XML node; delegates to {@code readData(Node)}.
*/
public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
readData( stepnode );
}
/**
 * Deep-copies this metadata: the parallel per-file arrays are duplicated and each
 * non-null input field is cloned individually.
 */
public Object clone() {
  JsonInputMeta retval = (JsonInputMeta) super.clone();
  final int nrFiles = fileName.length;
  final int nrFields = inputFields.length;
  retval.allocate( nrFiles, nrFields );
  System.arraycopy( fileName, 0, retval.fileName, 0, nrFiles );
  System.arraycopy( fileMask, 0, retval.fileMask, 0, nrFiles );
  System.arraycopy( excludeFileMask, 0, retval.excludeFileMask, 0, nrFiles );
  System.arraycopy( fileRequired, 0, retval.fileRequired, 0, nrFiles );
  System.arraycopy( includeSubFolders, 0, retval.includeSubFolders, 0, nrFiles );
  for ( int idx = 0; idx < nrFields; idx++ ) {
    JsonInputField original = inputFields[idx];
    if ( original != null ) {
      retval.inputFields[idx] = (JsonInputField) original.clone();
    }
  }
  return retval;
}
/**
 * Serializes this step's settings to the transformation XML. The output is read back by
 * {@code readData(Node)}.
 *
 * @return the XML fragment describing all options, files and fields of this step
 */
public String getXML() {
  // StringBuilder instead of StringBuffer: the buffer is method-local, no synchronization needed.
  StringBuilder retval = new StringBuilder( 400 );
  retval.append( " " ).append( XMLHandler.addTagValue( "include", includeFilename ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "include_field", filenameField ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "rownum", includeRowNumber ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "addresultfile", addResultFile ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "readurl", readurl ) );
  // These three tags previously used string concatenation; use the same append style as the rest.
  retval.append( " " ).append( XMLHandler.addTagValue( "IsIgnoreEmptyFile", IsIgnoreEmptyFile ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "doNotFailIfNoFile", doNotFailIfNoFile ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "ignoreMissingPath", ignoreMissingPath ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "rownum_field", rowNumberField ) );
  // One <name>/<filemask>/... tuple per configured file entry.
  retval.append( " <file>" ).append( Const.CR );
  for ( int i = 0; i < fileName.length; i++ ) {
    retval.append( " " ).append( XMLHandler.addTagValue( "name", fileName[i] ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "filemask", fileMask[i] ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "exclude_filemask", excludeFileMask[i] ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "file_required", fileRequired[i] ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "include_subfolders", includeSubFolders[i] ) );
  }
  retval.append( " </file>" ).append( Const.CR );
  retval.append( " <fields>" ).append( Const.CR );
  for ( int i = 0; i < inputFields.length; i++ ) {
    JsonInputField field = inputFields[i];
    retval.append( field.getXML() );
  }
  retval.append( " </fields>" ).append( Const.CR );
  retval.append( " " ).append( XMLHandler.addTagValue( "limit", rowLimit ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "IsInFields", inFields ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "IsAFile", IsAFile ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "valueField", valueField ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "shortFileFieldName", shortFileFieldName ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "pathFieldName", pathFieldName ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "hiddenFieldName", hiddenFieldName ) );
  retval.append( " " ).append(
    XMLHandler.addTagValue( "lastModificationTimeFieldName", lastModificationTimeFieldName ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "uriNameFieldName", uriNameFieldName ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "rootUriNameFieldName", rootUriNameFieldName ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "extensionFieldName", extensionFieldName ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "sizeFieldName", sizeFieldName ) );
  return retval.toString();
}
/**
 * Maps a "required files" code to its localized description; anything other than the
 * second code (including null/empty) maps to the first description.
 */
public String getRequiredFilesDesc( String tt ) {
  if ( Const.isEmpty( tt ) ) {
    return RequiredFilesDesc[0];
  }
  return tt.equalsIgnoreCase( RequiredFilesCode[1] ) ? RequiredFilesDesc[1] : RequiredFilesDesc[0];
}
/**
 * Maps a localized "required files" description back to its code; anything other than the
 * second description (including null) maps to the first code.
 */
public String getRequiredFilesCode( String tt ) {
  if ( tt == null ) {
    return RequiredFilesCode[0];
  }
  return tt.equals( RequiredFilesDesc[1] ) ? RequiredFilesCode[1] : RequiredFilesCode[0];
}
/**
* Parses this step's configuration from the XML fragment produced by {@code getXML()}.
*
* @param stepnode
* the step XML node
* @throws KettleXMLException
* if the configuration cannot be read
*/
private void readData( Node stepnode ) throws KettleXMLException {
try {
// Boolean options are serialized as "Y"/"N"; a missing tag therefore reads as false.
includeFilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "include" ) );
filenameField = XMLHandler.getTagValue( stepnode, "include_field" );
addResultFile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "addresultfile" ) );
readurl = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "readurl" ) );
IsIgnoreEmptyFile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "IsIgnoreEmptyFile" ) );
ignoreMissingPath = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "ignoreMissingPath" ) );
// NOTE(review): setDefault() initializes doNotFailIfNoFile to true, but a missing tag here
// reads as false — confirm this asymmetry is intended for older saved transformations.
doNotFailIfNoFile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "doNotFailIfNoFile" ) );
includeRowNumber = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "rownum" ) );
rowNumberField = XMLHandler.getTagValue( stepnode, "rownum_field" );
Node filenode = XMLHandler.getSubNode( stepnode, "file" );
Node fields = XMLHandler.getSubNode( stepnode, "fields" );
int nrFiles = XMLHandler.countNodes( filenode, "name" );
int nrFields = XMLHandler.countNodes( fields, "field" );
// Size the parallel per-file arrays and the field array before filling them.
allocate( nrFiles, nrFields );
for ( int i = 0; i < nrFiles; i++ ) {
Node filenamenode = XMLHandler.getSubNodeByNr( filenode, "name", i );
Node filemasknode = XMLHandler.getSubNodeByNr( filenode, "filemask", i );
Node excludefilemasknode = XMLHandler.getSubNodeByNr( filenode, "exclude_filemask", i );
Node fileRequirednode = XMLHandler.getSubNodeByNr( filenode, "file_required", i );
Node includeSubFoldersnode = XMLHandler.getSubNodeByNr( filenode, "include_subfolders", i );
fileName[i] = XMLHandler.getNodeValue( filenamenode );
fileMask[i] = XMLHandler.getNodeValue( filemasknode );
excludeFileMask[i] = XMLHandler.getNodeValue( excludefilemasknode );
fileRequired[i] = XMLHandler.getNodeValue( fileRequirednode );
includeSubFolders[i] = XMLHandler.getNodeValue( includeSubFoldersnode );
}
for ( int i = 0; i < nrFields; i++ ) {
Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
JsonInputField field = new JsonInputField( fnode );
inputFields[i] = field;
}
// Is there a limit on the number of rows we process?
rowLimit = Const.toLong( XMLHandler.getTagValue( stepnode, "limit" ), 0L );
inFields = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "IsInFields" ) );
IsAFile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "IsAFile" ) );
valueField = XMLHandler.getTagValue( stepnode, "valueField" );
shortFileFieldName = XMLHandler.getTagValue( stepnode, "shortFileFieldName" );
pathFieldName = XMLHandler.getTagValue( stepnode, "pathFieldName" );
hiddenFieldName = XMLHandler.getTagValue( stepnode, "hiddenFieldName" );
lastModificationTimeFieldName = XMLHandler.getTagValue( stepnode, "lastModificationTimeFieldName" );
uriNameFieldName = XMLHandler.getTagValue( stepnode, "uriNameFieldName" );
rootUriNameFieldName = XMLHandler.getTagValue( stepnode, "rootUriNameFieldName" );
extensionFieldName = XMLHandler.getTagValue( stepnode, "extensionFieldName" );
sizeFieldName = XMLHandler.getTagValue( stepnode, "sizeFieldName" );
} catch ( Exception e ) {
throw new KettleXMLException( BaseMessages.getString( PKG, "JsonInputMeta.Exception.ErrorLoadingXML", e
.toString() ) );
}
}
/**
* (Re)allocates the parallel per-file arrays and the input-field array. Existing contents
* are discarded.
*
* @param nrfiles
* number of file entries
* @param nrfields
* number of input fields
*/
public void allocate( int nrfiles, int nrfields ) {
fileName = new String[nrfiles];
fileMask = new String[nrfiles];
excludeFileMask = new String[nrfiles];
fileRequired = new String[nrfiles];
includeSubFolders = new String[nrfiles];
inputFields = new JsonInputField[nrfields];
}
/**
* Resets every option of this step to its default value.
*/
public void setDefault() {
shortFileFieldName = null;
pathFieldName = null;
hiddenFieldName = null;
lastModificationTimeFieldName = null;
uriNameFieldName = null;
rootUriNameFieldName = null;
extensionFieldName = null;
sizeFieldName = null;
IsIgnoreEmptyFile = false;
ignoreMissingPath = false;
doNotFailIfNoFile = true;
includeFilename = false;
filenameField = "";
includeRowNumber = false;
rowNumberField = "";
IsAFile = false;
addResultFile = false;
readurl = false;
// Both counts are 0, so the loops below never execute; the allocate() call still matters
// because it replaces the arrays with empty ones.
int nrFiles = 0;
int nrFields = 0;
allocate( nrFiles, nrFields );
for ( int i = 0; i < nrFiles; i++ ) {
fileName[i] = "filename" + ( i + 1 );
fileMask[i] = "";
excludeFileMask[i] = "";
fileRequired[i] = RequiredFilesCode[0];
includeSubFolders[i] = RequiredFilesCode[0];
}
for ( int i = 0; i < nrFields; i++ ) {
inputFields[i] = new JsonInputField( "field" + ( i + 1 ) );
}
rowLimit = 0;
inFields = false;
valueField = "";
}
/**
* Describes the output row layout this step produces: one value meta per configured input
* field, then the optional filename and row-number columns, then the optional additional
* file-attribute columns (short name, extension, path, size, hidden flag, last modification
* date, URI, root URI). Column order here must match the order the step writes values.
*/
public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
int i;
for ( i = 0; i < inputFields.length; i++ ) {
JsonInputField field = inputFields[i];
int type = field.getType();
if ( type == ValueMeta.TYPE_NONE ) {
// Fields without an explicit type default to String.
type = ValueMeta.TYPE_STRING;
}
try {
ValueMetaInterface v = ValueMetaFactory.createValueMeta( space.environmentSubstitute( field.getName() ), type );
v.setLength( field.getLength() );
v.setPrecision( field.getPrecision() );
v.setOrigin( name );
v.setConversionMask( field.getFormat() );
v.setDecimalSymbol( field.getDecimalSymbol() );
v.setGroupingSymbol( field.getGroupSymbol() );
v.setCurrencySymbol( field.getCurrencySymbol() );
r.addValueMeta( v );
} catch ( Exception e ) {
throw new KettleStepException( e );
}
}
if ( includeFilename ) {
ValueMetaInterface v = new ValueMeta( space.environmentSubstitute( filenameField ), ValueMeta.TYPE_STRING );
v.setLength( 250 );
v.setPrecision( -1 );
v.setOrigin( name );
r.addValueMeta( v );
}
if ( includeRowNumber ) {
ValueMetaInterface v = new ValueMeta( space.environmentSubstitute( rowNumberField ), ValueMeta.TYPE_INTEGER );
v.setLength( ValueMetaInterface.DEFAULT_INTEGER_LENGTH, 0 );
v.setOrigin( name );
r.addValueMeta( v );
}
// Add additional fields
if ( getShortFileNameField() != null && getShortFileNameField().length() > 0 ) {
ValueMetaInterface v =
new ValueMeta( space.environmentSubstitute( getShortFileNameField() ), ValueMeta.TYPE_STRING );
v.setLength( 100, -1 );
v.setOrigin( name );
r.addValueMeta( v );
}
if ( getExtensionField() != null && getExtensionField().length() > 0 ) {
ValueMetaInterface v = new ValueMeta( space.environmentSubstitute( getExtensionField() ), ValueMeta.TYPE_STRING );
v.setLength( 100, -1 );
v.setOrigin( name );
r.addValueMeta( v );
}
if ( getPathField() != null && getPathField().length() > 0 ) {
ValueMetaInterface v = new ValueMeta( space.environmentSubstitute( getPathField() ), ValueMeta.TYPE_STRING );
v.setLength( 100, -1 );
v.setOrigin( name );
r.addValueMeta( v );
}
if ( getSizeField() != null && getSizeField().length() > 0 ) {
ValueMetaInterface v = new ValueMeta( space.environmentSubstitute( getSizeField() ), ValueMeta.TYPE_INTEGER );
v.setOrigin( name );
v.setLength( 9 );
r.addValueMeta( v );
}
if ( isHiddenField() != null && isHiddenField().length() > 0 ) {
ValueMetaInterface v = new ValueMeta( space.environmentSubstitute( isHiddenField() ), ValueMeta.TYPE_BOOLEAN );
v.setOrigin( name );
r.addValueMeta( v );
}
if ( getLastModificationDateField() != null && getLastModificationDateField().length() > 0 ) {
ValueMetaInterface v =
new ValueMeta( space.environmentSubstitute( getLastModificationDateField() ), ValueMeta.TYPE_DATE );
v.setOrigin( name );
r.addValueMeta( v );
}
if ( getUriField() != null && getUriField().length() > 0 ) {
ValueMetaInterface v = new ValueMeta( space.environmentSubstitute( getUriField() ), ValueMeta.TYPE_STRING );
v.setLength( 100, -1 );
v.setOrigin( name );
r.addValueMeta( v );
}
if ( getRootUriField() != null && getRootUriField().length() > 0 ) {
ValueMetaInterface v = new ValueMeta( space.environmentSubstitute( getRootUriField() ), ValueMeta.TYPE_STRING );
v.setLength( 100, -1 );
v.setOrigin( name );
r.addValueMeta( v );
}
}
/**
 * Loads this step's configuration from the repository. Mirrors {@link #saveRep}: every
 * attribute written there must be read back here.
 *
 * @throws KettleException if reading from the repository fails
 */
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases )
  throws KettleException {
  try {
    includeFilename = rep.getStepAttributeBoolean( id_step, "include" );
    filenameField = rep.getStepAttributeString( id_step, "include_field" );
    addResultFile = rep.getStepAttributeBoolean( id_step, "addresultfile" );
    readurl = rep.getStepAttributeBoolean( id_step, "readurl" );
    IsIgnoreEmptyFile = rep.getStepAttributeBoolean( id_step, "IsIgnoreEmptyFile" );
    ignoreMissingPath = rep.getStepAttributeBoolean( id_step, "ignoreMissingPath" );
    doNotFailIfNoFile = rep.getStepAttributeBoolean( id_step, "doNotFailIfNoFile" );
    includeRowNumber = rep.getStepAttributeBoolean( id_step, "rownum" );
    rowNumberField = rep.getStepAttributeString( id_step, "rownum_field" );
    rowLimit = rep.getStepAttributeInteger( id_step, "limit" );
    int nrFiles = rep.countNrStepAttributes( id_step, "file_name" );
    int nrFields = rep.countNrStepAttributes( id_step, "field_name" );
    // Size the parallel per-file arrays and the field array before filling them.
    allocate( nrFiles, nrFields );
    for ( int i = 0; i < nrFiles; i++ ) {
      fileName[i] = rep.getStepAttributeString( id_step, i, "file_name" );
      fileMask[i] = rep.getStepAttributeString( id_step, i, "file_mask" );
      excludeFileMask[i] = rep.getStepAttributeString( id_step, i, "exclude_file_mask" );
      fileRequired[i] = rep.getStepAttributeString( id_step, i, "file_required" );
      includeSubFolders[i] = rep.getStepAttributeString( id_step, i, "include_subfolders" );
    }
    for ( int i = 0; i < nrFields; i++ ) {
      JsonInputField field = new JsonInputField();
      field.setName( rep.getStepAttributeString( id_step, i, "field_name" ) );
      field.setPath( rep.getStepAttributeString( id_step, i, "field_path" ) );
      field.setType( ValueMeta.getType( rep.getStepAttributeString( id_step, i, "field_type" ) ) );
      field.setFormat( rep.getStepAttributeString( id_step, i, "field_format" ) );
      field.setCurrencySymbol( rep.getStepAttributeString( id_step, i, "field_currency" ) );
      field.setDecimalSymbol( rep.getStepAttributeString( id_step, i, "field_decimal" ) );
      field.setGroupSymbol( rep.getStepAttributeString( id_step, i, "field_group" ) );
      field.setLength( (int) rep.getStepAttributeInteger( id_step, i, "field_length" ) );
      field.setPrecision( (int) rep.getStepAttributeInteger( id_step, i, "field_precision" ) );
      field.setTrimType( JsonInputField
        .getTrimTypeByCode( rep.getStepAttributeString( id_step, i, "field_trim_type" ) ) );
      field.setRepeated( rep.getStepAttributeBoolean( id_step, i, "field_repeat" ) );
      inputFields[i] = field;
    }
    inFields = rep.getStepAttributeBoolean( id_step, "IsInFields" );
    IsAFile = rep.getStepAttributeBoolean( id_step, "IsAFile" );
    valueField = rep.getStepAttributeString( id_step, "valueField" );
    shortFileFieldName = rep.getStepAttributeString( id_step, "shortFileFieldName" );
    pathFieldName = rep.getStepAttributeString( id_step, "pathFieldName" );
    hiddenFieldName = rep.getStepAttributeString( id_step, "hiddenFieldName" );
    lastModificationTimeFieldName = rep.getStepAttributeString( id_step, "lastModificationTimeFieldName" );
    // BUGFIX: uriNameFieldName is saved by saveRep() and serialized by getXML(), but was
    // never read back here, so the setting was silently lost on repository load.
    uriNameFieldName = rep.getStepAttributeString( id_step, "uriNameFieldName" );
    rootUriNameFieldName = rep.getStepAttributeString( id_step, "rootUriNameFieldName" );
    extensionFieldName = rep.getStepAttributeString( id_step, "extensionFieldName" );
    sizeFieldName = rep.getStepAttributeString( id_step, "sizeFieldName" );
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString( PKG, "JsonInputMeta.Exception.ErrorReadingRepository" ), e );
  }
}
/**
 * Saves this step's configuration to the repository. Mirrors {@link #readRep}: every
 * attribute read there must be written here.
 *
 * @throws KettleException if writing to the repository fails
 */
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step )
  throws KettleException {
  try {
    rep.saveStepAttribute( id_transformation, id_step, "include", includeFilename );
    rep.saveStepAttribute( id_transformation, id_step, "include_field", filenameField );
    rep.saveStepAttribute( id_transformation, id_step, "addresultfile", addResultFile );
    rep.saveStepAttribute( id_transformation, id_step, "readurl", readurl );
    rep.saveStepAttribute( id_transformation, id_step, "IsIgnoreEmptyFile", IsIgnoreEmptyFile );
    rep.saveStepAttribute( id_transformation, id_step, "ignoreMissingPath", ignoreMissingPath );
    rep.saveStepAttribute( id_transformation, id_step, "doNotFailIfNoFile", doNotFailIfNoFile );
    rep.saveStepAttribute( id_transformation, id_step, "rownum", includeRowNumber );
    rep.saveStepAttribute( id_transformation, id_step, "rownum_field", rowNumberField );
    rep.saveStepAttribute( id_transformation, id_step, "limit", rowLimit );
    for ( int i = 0; i < fileName.length; i++ ) {
      rep.saveStepAttribute( id_transformation, id_step, i, "file_name", fileName[i] );
      rep.saveStepAttribute( id_transformation, id_step, i, "file_mask", fileMask[i] );
      rep.saveStepAttribute( id_transformation, id_step, i, "exclude_file_mask", excludeFileMask[i] );
      rep.saveStepAttribute( id_transformation, id_step, i, "file_required", fileRequired[i] );
      rep.saveStepAttribute( id_transformation, id_step, i, "include_subfolders", includeSubFolders[i] );
    }
    for ( int i = 0; i < inputFields.length; i++ ) {
      JsonInputField field = inputFields[i];
      rep.saveStepAttribute( id_transformation, id_step, i, "field_name", field.getName() );
      rep.saveStepAttribute( id_transformation, id_step, i, "field_path", field.getPath() );
      rep.saveStepAttribute( id_transformation, id_step, i, "field_type", field.getTypeDesc() );
      rep.saveStepAttribute( id_transformation, id_step, i, "field_format", field.getFormat() );
      rep.saveStepAttribute( id_transformation, id_step, i, "field_currency", field.getCurrencySymbol() );
      rep.saveStepAttribute( id_transformation, id_step, i, "field_decimal", field.getDecimalSymbol() );
      rep.saveStepAttribute( id_transformation, id_step, i, "field_group", field.getGroupSymbol() );
      rep.saveStepAttribute( id_transformation, id_step, i, "field_length", field.getLength() );
      rep.saveStepAttribute( id_transformation, id_step, i, "field_precision", field.getPrecision() );
      rep.saveStepAttribute( id_transformation, id_step, i, "field_trim_type", field.getTrimTypeCode() );
      rep.saveStepAttribute( id_transformation, id_step, i, "field_repeat", field.isRepeated() );
    }
    rep.saveStepAttribute( id_transformation, id_step, "IsInFields", inFields );
    rep.saveStepAttribute( id_transformation, id_step, "IsAFile", IsAFile );
    rep.saveStepAttribute( id_transformation, id_step, "valueField", valueField );
    rep.saveStepAttribute( id_transformation, id_step, "shortFileFieldName", shortFileFieldName );
    rep.saveStepAttribute( id_transformation, id_step, "pathFieldName", pathFieldName );
    rep.saveStepAttribute( id_transformation, id_step, "hiddenFieldName", hiddenFieldName );
    rep.saveStepAttribute( id_transformation, id_step, "lastModificationTimeFieldName", lastModificationTimeFieldName );
    rep.saveStepAttribute( id_transformation, id_step, "uriNameFieldName", uriNameFieldName );
    rep.saveStepAttribute( id_transformation, id_step, "rootUriNameFieldName", rootUriNameFieldName );
    rep.saveStepAttribute( id_transformation, id_step, "extensionFieldName", extensionFieldName );
    // BUGFIX: sizeFieldName is read by readRep() and serialized by getXML(), but was never
    // saved here, so the setting was silently lost on repository save.
    rep.saveStepAttribute( id_transformation, id_step, "sizeFieldName", sizeFieldName );
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString( PKG, "JsonInputMeta.Exception.ErrorSavingToRepository", ""
      + id_step ), e );
  }
}
/**
* Resolves the configured names/masks into the concrete list of input files, applying
* variable substitution via the given space.
*/
public FileInputList getFiles( VariableSpace space ) {
return FileInputList.createFileList( space, fileName, fileMask, excludeFileMask, fileRequired,
includeSubFolderBoolean() );
}
/**
 * Converts the per-file "include subfolders" Y/N codes into a boolean array,
 * one entry per configured file name.
 */
private boolean[] includeSubFolderBoolean() {
  final boolean[] subFolderFlags = new boolean[fileName.length];
  for ( int idx = 0; idx < subFolderFlags.length; idx++ ) {
    subFolderFlags[idx] = YES.equalsIgnoreCase( includeSubFolders[idx] );
  }
  return subFolderFlags;
}
/**
* Validates this step's configuration and adds OK/error remarks. What is checked depends
* on whether the JSON source comes from an incoming field or from files on disk.
*/
public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ) {
CheckResult cr;
if ( !isInFields() ) {
// See if we get input...
// NOTE(review): the message keys here ("NoInputExpected" on the no-input branch) look
// swapped relative to their conditions — confirm against the message bundle.
if ( input.length <= 0 ) {
cr =
new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG,
"JsonInputMeta.CheckResult.NoInputExpected" ), stepMeta );
remarks.add( cr );
} else {
cr =
new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG,
"JsonInputMeta.CheckResult.NoInput" ), stepMeta );
remarks.add( cr );
}
}
// At least one output field must be configured.
if ( getInputFields().length <= 0 ) {
cr =
new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG,
"JsonInputMeta.CheckResult.NoInputField" ), stepMeta );
remarks.add( cr );
}
if ( isInFields() ) {
// Field mode: the source-field name must be set.
if ( Const.isEmpty( getFieldValue() ) ) {
cr =
new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG,
"JsonInputMeta.CheckResult.NoField" ), stepMeta );
remarks.add( cr );
} else {
cr =
new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG,
"JsonInputMeta.CheckResult.FieldOk" ), stepMeta );
remarks.add( cr );
}
} else {
// File mode: the resolved file list must not be empty.
FileInputList fileInputList = getFiles( transMeta );
// String files[] = getFiles();
if ( fileInputList == null || fileInputList.getFiles().size() == 0 ) {
cr =
new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG,
"JsonInputMeta.CheckResult.NoFiles" ), stepMeta );
remarks.add( cr );
} else {
cr =
new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG,
"JsonInputMeta.CheckResult.FilesOk", "" + fileInputList.getFiles().size() ), stepMeta );
remarks.add( cr );
}
}
}
/**
* Creates the runtime step instance for this metadata.
*/
public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta tr,
Trans trans ) {
return new JsonInput( stepMeta, stepDataInterface, cnr, tr, trans );
}
/**
* Creates a fresh, empty data holder for the runtime step.
*/
public StepDataInterface getStepData() {
return new JsonInputData();
}
/**
* @return true: this step supports redirecting failing rows to an error-handling hop
*/
public boolean supportsErrorHandling() {
return true;
}
/**
 * Since the exported transformation that runs this will reside in a ZIP file, we can't reference files relatively. So
 * what this does is turn the name of files into absolute paths OR it simply includes the resource in the ZIP file.
 * For now, we'll simply turn it into an absolute path and pray that the file is on a shared drive or something like
 * that.
 *
 * @param space
 * the variable space to use
 * @param definitions
 * @param resourceNamingInterface
 * @param repository
 * The repository to optionally load other resources from (to be converted to XML)
 * @param metaStore
 * the metaStore in which non-kettle metadata could reside.
 *
 * @return the filename of the exported resource
 */
public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
  ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore )
  throws KettleException {
  try {
    // The object that we're modifying here is a copy of the original!
    // So let's change the filename from relative to absolute by grabbing the file object...
    // In case the name of the file comes from previous steps, forget about this!
    //
    List<String> newFilenames = new ArrayList<String>();
    if ( !isInFields() ) {
      FileInputList fileList = getFiles( space );
      if ( fileList.getFiles().size() > 0 ) {
        for ( FileObject fileObject : fileList.getFiles() ) {
          // From : ${Internal.Transformation.Filename.Directory}/../foo/bar.xml
          // To : /home/matt/test/files/foo/bar.xml
          //
          // If the file doesn't exist, forget about this effort too!
          //
          if ( fileObject.exists() ) {
            // Convert to an absolute path and add it to the list.
            //
            newFilenames.add( fileObject.getName().getPath() );
          }
        }
        // Still here: set a new list of absolute filenames!
        //
        int nrResolved = newFilenames.size();
        fileName = newFilenames.toArray( new String[nrResolved] );
        fileMask = new String[nrResolved]; // all null since converted to absolute path.
        // BUGFIX: keep ALL parallel per-file arrays the same length. Previously
        // excludeFileMask and includeSubFolders kept their old (possibly shorter) length,
        // so includeSubFolderBoolean() — which indexes by fileName.length — could throw
        // ArrayIndexOutOfBoundsException after masks expanded to more files.
        excludeFileMask = new String[nrResolved]; // all null: names are absolute now.
        includeSubFolders = new String[nrResolved]; // all null: no recursion needed anymore.
        fileRequired = new String[nrResolved]; // all null, turn to "Y" :
        for ( int i = 0; i < nrResolved; i++ ) {
          fileRequired[i] = "Y";
        }
      }
    }
    return null;
  } catch ( Exception e ) {
    throw new KettleException( e );
  }
}
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.objc;
import static com.google.devtools.build.lib.collect.nestedset.Order.LINK_ORDER;
import static com.google.devtools.build.lib.collect.nestedset.Order.STABLE_ORDER;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.TransitiveInfoProvider;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.packages.SkylarkClassObject;
import com.google.devtools.build.lib.packages.SkylarkClassObjectConstructor;
import com.google.devtools.build.lib.rules.cpp.CppModuleMap;
import com.google.devtools.build.lib.rules.cpp.LinkerInputs;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModule;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModuleCategory;
import com.google.devtools.build.lib.syntax.EvalUtils;
import com.google.devtools.build.lib.util.Preconditions;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.xcode.xcodegen.proto.XcodeGenProtos.TargetControl;
import java.util.HashMap;
import java.util.Map;
/**
* A provider that provides all compiling and linking information in the transitive closure of its
* deps that are needed for building Objective-C rules.
*/
@Immutable
@SkylarkModule(
name = "ObjcProvider",
category = SkylarkModuleCategory.PROVIDER,
doc = "A provider for compilation and linking of objc."
)
public final class ObjcProvider extends SkylarkClassObject implements TransitiveInfoProvider {
/**
* The skylark struct key name for a rule implementation to use when exporting an ObjcProvider.
*/
public static final String OBJC_SKYLARK_PROVIDER_NAME = "objc";
/**
 * Represents one of the things this provider can provide transitively. Things are provided as
 * {@link NestedSet}s of type E.
 */
@Immutable
public static class Key<E> {
  private final Order order;
  private final String skylarkKeyName;
  private final Class<E> type;

  private Key(Order order, String skylarkKeyName, Class<E> type) {
    // Consistently reject nulls for all three components; previously only "order" was checked,
    // deferring NPEs on the other two to an arbitrary later use site.
    this.order = Preconditions.checkNotNull(order);
    this.skylarkKeyName = Preconditions.checkNotNull(skylarkKeyName);
    this.type = Preconditions.checkNotNull(type);
  }

  /**
   * Returns the name of the collection represented by this key in the Skylark provider.
   */
  public String getSkylarkKeyName() {
    return skylarkKeyName;
  }

  /**
   * Returns the type of nested set keyed in the ObjcProvider by this key.
   */
  public Class<E> getType() {
    return type;
  }
}
/** Library archives to link, kept in link order. */
public static final Key<Artifact> LIBRARY = new Key<>(LINK_ORDER, "library", Artifact.class);
/**
 * Imported library archives, kept in link order. Presumably prebuilt archives supplied by
 * import-style rules rather than built from source — confirm against the rules that add to
 * this key.
 */
public static final Key<Artifact> IMPORTED_LIBRARY =
new Key<>(LINK_ORDER, "imported_library", Artifact.class);
/**
* J2ObjC JRE emulation libraries and their dependencies. Separate from LIBRARY because these
* dependencies are specified further up the tree from where the dependency actually exists and
* they must be forced to the end of the link order.
*/
public static final Key<Artifact> JRE_LIBRARY =
new Key<>(LINK_ORDER, "jre_library", Artifact.class);
/**
* Single-architecture linked binaries to be combined for the final multi-architecture binary.
*/
public static final Key<Artifact> LINKED_BINARY =
new Key<>(STABLE_ORDER, "linked_binary", Artifact.class);
/** Combined-architecture binaries to include in the final bundle. */
public static final Key<Artifact> MULTI_ARCH_LINKED_BINARIES =
new Key<>(STABLE_ORDER, "combined_arch_linked_binary", Artifact.class);
/** Combined-architecture dynamic libraries to include in the final bundle. */
public static final Key<Artifact> MULTI_ARCH_DYNAMIC_LIBRARIES =
new Key<>(STABLE_ORDER, "combined_arch_dynamic_library", Artifact.class);
/** Combined-architecture archives to include in the final bundle. */
public static final Key<Artifact> MULTI_ARCH_LINKED_ARCHIVES =
new Key<>(STABLE_ORDER, "combined_arch_linked_archive", Artifact.class);
/**
* Indicates which libraries to load with {@code -force_load}. This is a subset of the union of
* the {@link #LIBRARY} and {@link #IMPORTED_LIBRARY} sets.
*/
public static final Key<Artifact> FORCE_LOAD_LIBRARY =
new Key<>(LINK_ORDER, "force_load_library", Artifact.class);
/**
* Libraries to pass with -force_load flags when setting the linkopts in Xcodegen. This is needed
* in addition to {@link #FORCE_LOAD_LIBRARY} because that one, contains a mixture of import
* archives (which are not built by Xcode) and built-from-source library archives (which are built
* by Xcode). Archives that are built by Xcode are placed directly under
* {@code BUILT_PRODUCTS_DIR} while those not built by Xcode appear somewhere in the Bazel
* workspace under {@code WORKSPACE_ROOT}.
*/
public static final Key<String> FORCE_LOAD_FOR_XCODEGEN =
new Key<>(LINK_ORDER, "force_load_for_xcodegen", String.class);
/**
* Contains all header files. These may be either public or private headers.
*/
public static final Key<Artifact> HEADER = new Key<>(STABLE_ORDER, "header", Artifact.class);
/**
* Contains all source files.
*/
public static final Key<Artifact> SOURCE = new Key<>(STABLE_ORDER, "source", Artifact.class);
/**
* Include search paths specified with {@code -I} on the command line. Also known as header search
* paths (and distinct from <em>user</em> header search paths).
*/
public static final Key<PathFragment> INCLUDE =
new Key<>(LINK_ORDER, "include", PathFragment.class);
/**
* Include search paths specified with {@code -isystem} on the command line.
*/
public static final Key<PathFragment> INCLUDE_SYSTEM =
new Key<>(LINK_ORDER, "include_system", PathFragment.class);
/**
* Key for values in {@code defines} attributes. These are passed as {@code -D} flags to all
* invocations of the compiler for this target and all depending targets.
*/
public static final Key<String> DEFINE = new Key<>(STABLE_ORDER, "define", String.class);
public static final Key<Artifact> ASSET_CATALOG =
new Key<>(STABLE_ORDER, "asset_catalog", Artifact.class);
/**
* Added to {@link TargetControl#getGeneralResourceFileList()} when running Xcodegen.
*/
public static final Key<Artifact> GENERAL_RESOURCE_FILE =
new Key<>(STABLE_ORDER, "general_resource_file", Artifact.class);
/**
* Resource directories added to {@link TargetControl#getGeneralResourceFileList()} when running
* Xcodegen. When copying files inside resource directories to the app bundle, XCode will preserve
* the directory structures of the copied files.
*/
public static final Key<PathFragment> GENERAL_RESOURCE_DIR =
new Key<>(STABLE_ORDER, "general_resource_dir", PathFragment.class);
/**
* Exec paths of {@code .bundle} directories corresponding to imported bundles to link.
* These are passed to Xcodegen.
*/
public static final Key<PathFragment> BUNDLE_IMPORT_DIR =
new Key<>(STABLE_ORDER, "bundle_import_dir", PathFragment.class);
/**
* Files that are plopped into the final bundle at some arbitrary bundle path. Note that these are
* not passed to Xcodegen, and these don't include information about where the file originated
* from.
*/
public static final Key<BundleableFile> BUNDLE_FILE =
new Key<>(STABLE_ORDER, "bundle_file", BundleableFile.class);
public static final Key<PathFragment> XCASSETS_DIR =
new Key<>(STABLE_ORDER, "xcassets_dir", PathFragment.class);
public static final Key<String> SDK_DYLIB = new Key<>(STABLE_ORDER, "sdk_dylib", String.class);
public static final Key<SdkFramework> SDK_FRAMEWORK =
new Key<>(STABLE_ORDER, "sdk_framework", SdkFramework.class);
public static final Key<SdkFramework> WEAK_SDK_FRAMEWORK =
new Key<>(STABLE_ORDER, "weak_sdk_framework", SdkFramework.class);
public static final Key<Artifact> XCDATAMODEL =
new Key<>(STABLE_ORDER, "xcdatamodel", Artifact.class);
public static final Key<Flag> FLAG = new Key<>(STABLE_ORDER, "flag", Flag.class);
/**
* Clang module maps, used to enforce proper use of private header files.
*/
public static final Key<Artifact> MODULE_MAP =
new Key<>(STABLE_ORDER, "module_map", Artifact.class);
/**
* Information about this provider's module map, in the form of a {@link CppModuleMap}. This
* is intransitive, and can be used to get just the target's module map to pass to clang or to
* get the module maps for direct but not transitive dependencies. You should only add module maps
* for this key using {@link Builder#addWithoutPropagating}.
*/
public static final Key<CppModuleMap> TOP_LEVEL_MODULE_MAP =
new Key<>(STABLE_ORDER, "top_level_module_map", CppModuleMap.class);
/**
* Merge zips to include in the bundle. The entries of these zip files are included in the final
* bundle with the same path. The entries in the merge zips should not include the bundle root
* path (e.g. {@code Foo.app}).
*/
public static final Key<Artifact> MERGE_ZIP =
new Key<>(STABLE_ORDER, "merge_zip", Artifact.class);
/**
* Merge zips to include in the ipa and outside the bundle root.
*
* e.g. For a bundle Test.ipa, unzipped content will be in:
* Test.ipa/<unzipped>
* Test.ipa/Payload
* Test.ipa/Payload/Test.app
*/
public static final Key<Artifact> ROOT_MERGE_ZIP =
new Key<>(STABLE_ORDER, "root_merge_zip", Artifact.class);
/**
* Exec paths of {@code .framework} directories corresponding to frameworks to link. These cause
* -F arguments (framework search paths) to be added to each compile action, and -framework (link
* framework) arguments to be added to each link action.
*/
public static final Key<PathFragment> FRAMEWORK_DIR =
new Key<>(LINK_ORDER, "framework_dir", PathFragment.class);
/**
* Exec paths of {@code .framework} directories corresponding to frameworks to include in search
* paths, but not to link. These cause -F arguments (framework search paths) to be added to
* each compile action, but do not cause -framework (link framework) arguments to be added to
* link actions.
*/
public static final Key<PathFragment> FRAMEWORK_SEARCH_PATH_ONLY =
new Key<>(LINK_ORDER, "framework_search_paths", PathFragment.class);
/**
* Files in {@code .framework} directories that should be included as inputs when compiling and
* linking.
*/
public static final Key<Artifact> STATIC_FRAMEWORK_FILE =
new Key<>(STABLE_ORDER, "static_framework_file", Artifact.class);
/**
* Files in {@code .framework} directories belonging to a dynamically linked framework. They
* should be included as inputs when compiling and linking as well as copied into the final
* application bundle.
*/
public static final Key<Artifact> DYNAMIC_FRAMEWORK_FILE =
new Key<>(STABLE_ORDER, "dynamic_framework_file", Artifact.class);
/**
* Bundles which should be linked in as a nested bundle to the final application.
*/
public static final Key<Bundling> NESTED_BUNDLE =
new Key<>(STABLE_ORDER, "nested_bundle", Bundling.class);
/**
* Artifact containing information on debug symbols.
*/
public static final Key<Artifact> DEBUG_SYMBOLS =
new Key<>(STABLE_ORDER, "debug_symbols", Artifact.class);
/**
* Artifact containing the plist of the debug symbols.
*/
public static final Key<Artifact> DEBUG_SYMBOLS_PLIST =
new Key<>(STABLE_ORDER, "debug_symbols_plist", Artifact.class);
/**
* Debug artifacts that should be exported by the top-level target.
*/
public static final Key<Artifact> EXPORTED_DEBUG_ARTIFACTS =
new Key<>(STABLE_ORDER, "exported_debug_artifacts", Artifact.class);
/**
* Single-architecture link map for a binary.
*/
public static final Key<Artifact> LINKMAP_FILE =
new Key<>(STABLE_ORDER, "linkmap_file", Artifact.class);
/**
* Artifacts for storyboard sources.
*/
public static final Key<Artifact> STORYBOARD =
new Key<>(STABLE_ORDER, "storyboard", Artifact.class);
/**
* Artifacts for .xib file sources.
*/
public static final Key<Artifact> XIB = new Key<>(STABLE_ORDER, "xib", Artifact.class);
/**
* Artifacts for strings source files.
*/
public static final Key<Artifact> STRINGS = new Key<>(STABLE_ORDER, "strings", Artifact.class);
/**
* Linking information from cc dependencies.
*/
public static final Key<LinkerInputs.LibraryToLink> CC_LIBRARY =
new Key<>(LINK_ORDER, "cc_library", LinkerInputs.LibraryToLink.class);
/**
* Linking options from dependencies.
*/
public static final Key<String> LINKOPT = new Key<>(LINK_ORDER, "linkopt", String.class);
/**
* Link time artifacts from dependencies. These do not fall into any other category such as
* libraries or archives, rather provide a way to add arbitrary data (e.g. Swift AST files)
* to the linker. The rule that adds these is also responsible to add the necessary linker flags
* in {@link #LINKOPT}.
*/
public static final Key<Artifact> LINK_INPUTS =
new Key<>(LINK_ORDER, "link_inputs", Artifact.class);
/** Static libraries that are built from J2ObjC-translated Java code. */
public static final Key<Artifact> J2OBJC_LIBRARY =
new Key<>(LINK_ORDER, "j2objc_library", Artifact.class);
/**
* Flags that apply to a transitive build dependency tree. Each item in the enum corresponds to a
* flag. If the item is included in the key {@link #FLAG}, then the flag is considered set.
*/
public enum Flag {
/**
* Indicates that C++ (or Objective-C++) is used in any source file. This affects how the linker
* is invoked.
*/
USES_CPP,
/**
* Indicates that Swift source files are present. This affects bundling, compiling and linking
* actions.
*/
USES_SWIFT,
/**
* Indicates that a watchOS 1 extension is present in the bundle. (There can only be one
* extension for any given watchOS version in a given bundle).
*/
HAS_WATCH1_EXTENSION,
/**
* Indicates that a watchOS 2 extension is present in the bundle. (There can only be one
* extension for any given watchOS version in a given bundle).
*/
HAS_WATCH2_EXTENSION,
}
  // Items which are propagated to direct and transitive dependents.
  private final ImmutableMap<Key<?>, NestedSet<?>> items;
  // Items which should not be propagated to dependents.
  private final ImmutableMap<Key<?>, NestedSet<?>> nonPropagatedItems;
  /**
   * All keys in ObjcProvider that will be passed in the corresponding Skylark provider.
   * Every key must appear either here or in {@link #KEYS_NOT_IN_SKYLARK}, so that exposure to
   * Skylark is always a conscious decision.
   */
  static final ImmutableList<Key<?>> KEYS_FOR_SKYLARK =
      ImmutableList.<Key<?>>of(
          ASSET_CATALOG,
          BUNDLE_FILE,
          BUNDLE_IMPORT_DIR,
          DEFINE,
          DYNAMIC_FRAMEWORK_FILE,
          DEBUG_SYMBOLS,
          DEBUG_SYMBOLS_PLIST,
          EXPORTED_DEBUG_ARTIFACTS,
          FRAMEWORK_DIR,
          FRAMEWORK_SEARCH_PATH_ONLY,
          FORCE_LOAD_LIBRARY,
          GENERAL_RESOURCE_DIR,
          GENERAL_RESOURCE_FILE,
          HEADER,
          IMPORTED_LIBRARY,
          INCLUDE,
          INCLUDE_SYSTEM,
          J2OBJC_LIBRARY,
          JRE_LIBRARY,
          LIBRARY,
          LINK_INPUTS,
          LINKED_BINARY,
          LINKMAP_FILE,
          LINKOPT,
          MERGE_ZIP,
          MODULE_MAP,
          MULTI_ARCH_DYNAMIC_LIBRARIES,
          MULTI_ARCH_LINKED_ARCHIVES,
          MULTI_ARCH_LINKED_BINARIES,
          ROOT_MERGE_ZIP,
          SDK_DYLIB,
          SDK_FRAMEWORK,
          SOURCE,
          STATIC_FRAMEWORK_FILE,
          STORYBOARD,
          STRINGS,
          WEAK_SDK_FRAMEWORK,
          XCASSETS_DIR,
          XCDATAMODEL,
          XIB);
  /**
   * All keys in ObjcProvider that are explicitly not exposed to skylark. This is used for
   * testing and verification purposes to ensure that a conscious decision is made for all keys;
   * by default, keys should be exposed to skylark: a comment outlining why a key is omitted
   * from skylark should follow each such case.
   */
  @VisibleForTesting
  static final ImmutableList<Key<?>> KEYS_NOT_IN_SKYLARK = ImmutableList.<Key<?>>of(
      // LibraryToLink not exposed to skylark.
      CC_LIBRARY,
      // Xcodegen is deprecated.
      FORCE_LOAD_FOR_XCODEGEN,
      // Flag enum is not exposed to skylark.
      FLAG,
      // Bundle not exposed to skylark.
      NESTED_BUNDLE,
      // CppModuleMap is not exposed to skylark.
      TOP_LEVEL_MODULE_MAP);
/**
* Returns the skylark key for the given string, or null if no such key exists or is available
* to Skylark.
*/
static Key<?> getSkylarkKeyForString(String keyName) {
for (Key<?> candidateKey : KEYS_FOR_SKYLARK) {
if (candidateKey.getSkylarkKeyName().equals(keyName)) {
return candidateKey;
}
}
return null;
}
  // Items which should be passed to strictly direct dependers, but not transitive dependers.
  private final ImmutableMap<Key<?>, NestedSet<?>> strictDependencyItems;
  // Native constructor backing the "objc_provider" object exposed to Skylark.
  private static final SkylarkClassObjectConstructor OBJC_PROVIDER =
      SkylarkClassObjectConstructor.createNative("objc_provider");
  /**
   * Instantiates the provider; use {@link Builder} rather than calling this directly.
   *
   * @param items items propagated to direct and transitive dependers
   * @param nonPropagatedItems items visible only to this provider's own {@link #get} calls
   * @param strictDependencyItems items propagated only to direct dependers
   * @param skylarkFields map of Skylark field name to converted value, one entry per key in
   *     {@link #KEYS_FOR_SKYLARK}
   */
  private ObjcProvider(
      ImmutableMap<Key<?>, NestedSet<?>> items,
      ImmutableMap<Key<?>, NestedSet<?>> nonPropagatedItems,
      ImmutableMap<Key<?>, NestedSet<?>> strictDependencyItems,
      ImmutableMap<String, Object> skylarkFields) {
    super(OBJC_PROVIDER, skylarkFields, "ObjcProvider field %s could not be instantiated");
    this.items = Preconditions.checkNotNull(items);
    this.nonPropagatedItems = Preconditions.checkNotNull(nonPropagatedItems);
    this.strictDependencyItems = Preconditions.checkNotNull(strictDependencyItems);
  }
  @Override
  public Concatter getConcatter() {
    // NOTE(review): returning null presumably opts this provider out of Skylark concatenation —
    // confirm against the Concatter contract before relying on this.
    return null;
  }
/**
* All artifacts, bundleable files, etc. of the type specified by {@code key}.
*/
@SuppressWarnings("unchecked")
public <E> NestedSet<E> get(Key<E> key) {
Preconditions.checkNotNull(key);
NestedSetBuilder<E> builder = new NestedSetBuilder<>(key.order);
if (strictDependencyItems.containsKey(key)) {
builder.addTransitive((NestedSet<E>) strictDependencyItems.get(key));
}
if (nonPropagatedItems.containsKey(key)) {
builder.addTransitive((NestedSet<E>) nonPropagatedItems.get(key));
}
if (items.containsKey(key)) {
builder.addTransitive((NestedSet<E>) items.get(key));
}
return builder.build();
}
/**
* All artifacts, bundleable files, etc, that should be propagated to transitive dependers, of
* the type specified by {@code key}.
*/
@SuppressWarnings("unchecked")
public <E> NestedSet<E> getPropagable(Key<E> key) {
Preconditions.checkNotNull(key);
NestedSetBuilder<E> builder = new NestedSetBuilder<>(key.order);
if (items.containsKey(key)) {
builder.addTransitive((NestedSet<E>) items.get(key));
}
return builder.build();
}
/**
* Indicates whether {@code flag} is set on this provider.
*/
public boolean is(Flag flag) {
return Iterables.contains(get(FLAG), flag);
}
/**
* Indicates whether this provider has any asset catalogs. This is true whenever some target in
* its transitive dependency tree specifies a non-empty {@code asset_catalogs} attribute.
*/
public boolean hasAssetCatalogs() {
return !get(XCASSETS_DIR).isEmpty();
}
/** Returns the list of .a files required for linking that arise from objc libraries. */
ImmutableList<Artifact> getObjcLibraries() {
// JRE libraries must be ordered after all regular objc libraries.
NestedSet<Artifact> jreLibs = get(JRE_LIBRARY);
return ImmutableList.<Artifact>builder()
.addAll(Iterables.filter(
get(LIBRARY), Predicates.not(Predicates.in(jreLibs.toSet()))))
.addAll(jreLibs)
.build();
}
/** Returns the list of .a files required for linking that arise from cc libraries. */
ImmutableList<Artifact> getCcLibraries() {
ImmutableList.Builder<Artifact> ccLibraryBuilder = ImmutableList.builder();
for (LinkerInputs.LibraryToLink libraryToLink : get(CC_LIBRARY)) {
ccLibraryBuilder.add(libraryToLink.getArtifact());
}
return ccLibraryBuilder.build();
}
/**
* A builder for this context with an API that is optimized for collecting information from
* several transitive dependencies.
*/
public static final class Builder {
private final Map<Key<?>, NestedSetBuilder<?>> items = new HashMap<>();
private final Map<Key<?>, NestedSetBuilder<?>> nonPropagatedItems = new HashMap<>();
private final Map<Key<?>, NestedSetBuilder<?>> strictDependencyItems = new HashMap<>();
private static void maybeAddEmptyBuilder(Map<Key<?>, NestedSetBuilder<?>> set, Key<?> key) {
if (!set.containsKey(key)) {
set.put(key, new NestedSetBuilder<>(key.order));
}
}
@SuppressWarnings({"rawtypes", "unchecked"})
private void uncheckedAddAll(Key key, Iterable toAdd, Map<Key<?>, NestedSetBuilder<?>> set) {
maybeAddEmptyBuilder(set, key);
set.get(key).addAll(toAdd);
}
@SuppressWarnings({"rawtypes", "unchecked"})
private void uncheckedAddTransitive(Key key, NestedSet toAdd,
Map<Key<?>, NestedSetBuilder<?>> set) {
maybeAddEmptyBuilder(set, key);
set.get(key).addTransitive(toAdd);
}
/**
* Add all elements from providers, and propagate them to any (transitive) dependers on this
* ObjcProvider.
*/
public Builder addTransitiveAndPropagate(Iterable<ObjcProvider> providers) {
for (ObjcProvider provider : providers) {
addTransitiveAndPropagate(provider);
}
return this;
}
/**
* Add all keys and values from provider, and propagate them to any (transitive) dependers on
* this ObjcProvider.
*/
public Builder addTransitiveAndPropagate(ObjcProvider provider) {
for (Map.Entry<Key<?>, NestedSet<?>> typeEntry : provider.items.entrySet()) {
uncheckedAddTransitive(typeEntry.getKey(), typeEntry.getValue(), this.items);
}
for (Map.Entry<Key<?>, NestedSet<?>> typeEntry : provider.strictDependencyItems.entrySet()) {
uncheckedAddTransitive(typeEntry.getKey(), typeEntry.getValue(), this.nonPropagatedItems);
}
return this;
}
/**
* Add all keys and values from the given provider, but propagate any normally-propagated items
* only to direct dependers of this ObjcProvider.
*/
public Builder addAsDirectDeps(ObjcProvider provider) {
for (Map.Entry<Key<?>, NestedSet<?>> typeEntry : provider.items.entrySet()) {
uncheckedAddTransitive(typeEntry.getKey(), typeEntry.getValue(),
this.strictDependencyItems);
}
for (Map.Entry<Key<?>, NestedSet<?>> typeEntry : provider.strictDependencyItems.entrySet()) {
uncheckedAddTransitive(typeEntry.getKey(), typeEntry.getValue(), this.nonPropagatedItems);
}
return this;
}
/**
* Add all elements from a single key of the given provider, and propagate them to any
* (transitive) dependers on this ObjcProvider.
*/
public Builder addTransitiveAndPropagate(Key key, ObjcProvider provider) {
if (provider.items.containsKey(key)) {
uncheckedAddTransitive(key, provider.items.get(key), this.items);
}
if (provider.strictDependencyItems.containsKey(key)) {
uncheckedAddTransitive(
key, provider.strictDependencyItems.get(key), this.nonPropagatedItems);
}
return this;
}
/**
* Adds elements in items, and propagate them to any (transitive) dependers on this
* ObjcProvider.
*/
public <E> Builder addTransitiveAndPropagate(Key<E> key, NestedSet<E> items) {
uncheckedAddTransitive(key, items, this.items);
return this;
}
/**
* Add elements from providers, but don't propagate them to any dependers on this ObjcProvider.
* These elements will be exposed to {@link #get(Key)} calls, but not to any ObjcProviders
* which add this provider to themselves.
*/
public Builder addTransitiveWithoutPropagating(Iterable<ObjcProvider> providers) {
for (ObjcProvider provider : providers) {
addTransitiveWithoutPropagating(provider);
}
return this;
}
/**
* Add all keys and values from provider, without propagating them to any (transitive) dependers
* on this ObjcProvider. These elements will be exposed to {@link #get(Key)} calls, but not to
* any ObjcProviders which add this provider to themselves.
*/
public Builder addTransitiveWithoutPropagating(ObjcProvider provider) {
for (Map.Entry<Key<?>, NestedSet<?>> typeEntry : provider.items.entrySet()) {
uncheckedAddTransitive(typeEntry.getKey(), typeEntry.getValue(), this.nonPropagatedItems);
}
for (Map.Entry<Key<?>, NestedSet<?>> typeEntry : provider.strictDependencyItems.entrySet()) {
uncheckedAddTransitive(typeEntry.getKey(), typeEntry.getValue(), this.nonPropagatedItems);
}
return this;
}
/**
* Add a single key from provider, without propagating them to any (transitive) dependers
* on this ObjcProvider. These elements will be exposed to {@link #get(Key)} calls, but not to
* any ObjcProviders which add this provider to themselves.
*/
public Builder addTransitiveWithoutPropagating(Key key, ObjcProvider provider) {
if (provider.items.containsKey(key)) {
uncheckedAddTransitive(key, provider.items.get(key), this.nonPropagatedItems);
}
if (provider.strictDependencyItems.containsKey(key)) {
uncheckedAddTransitive(
key, provider.strictDependencyItems.get(key), this.nonPropagatedItems);
}
return this;
}
/**
* Adds elements in items, without propagating them to any (transitive) dependers on this
* ObjcProvider.
*/
public <E> Builder addTransitiveWithoutPropagating(Key<E> key, NestedSet<E> items) {
uncheckedAddTransitive(key, items, this.nonPropagatedItems);
return this;
}
/**
* Add element, and propagate it to any (transitive) dependers on this ObjcProvider.
*/
public <E> Builder add(Key<E> key, E toAdd) {
uncheckedAddAll(key, ImmutableList.of(toAdd), this.items);
return this;
}
/**
* Add elements in toAdd, and propagate them to any (transitive) dependers on this ObjcProvider.
*/
public <E> Builder addAll(Key<E> key, Iterable<? extends E> toAdd) {
uncheckedAddAll(key, toAdd, this.items);
return this;
}
/**
* Add element toAdd, and propagate it only to direct dependents of this provider.
*/
public <E> Builder addForDirectDependents(Key<E> key, E toAdd) {
uncheckedAddAll(key, ImmutableList.of(toAdd), this.strictDependencyItems);
return this;
}
/**
* Add elements in toAdd, and propagate them only to direct dependents of this provider.
*/
public <E> Builder addAllForDirectDependents(Key<E> key, Iterable<? extends E> toAdd) {
uncheckedAddAll(key, toAdd, this.strictDependencyItems);
return this;
}
/**
* Add elements in toAdd with the given key from skylark. An error is thrown if toAdd is not
* an appropriate SkylarkNestedSet.
*/
void addElementsFromSkylark(Key<?> key, Object toAdd) {
uncheckedAddAll(key, ObjcProviderSkylarkConverters.convertToJava(key, toAdd), this.items);
}
/**
* Adds the given providers from skylark. An error is thrown if toAdd is not an iterable of
* ObjcProvider instances.
*/
@SuppressWarnings("unchecked")
void addProvidersFromSkylark(Object toAdd) {
if (!(toAdd instanceof Iterable)) {
throw new IllegalArgumentException(
String.format(
AppleSkylarkCommon.BAD_PROVIDERS_ITER_ERROR, EvalUtils.getDataTypeName(toAdd)));
} else {
Iterable<Object> toAddIterable = (Iterable<Object>) toAdd;
for (Object toAddObject : toAddIterable) {
if (!(toAddObject instanceof ObjcProvider)) {
throw new IllegalArgumentException(
String.format(
AppleSkylarkCommon.BAD_PROVIDERS_ELEM_ERROR,
EvalUtils.getDataTypeName(toAddObject)));
} else {
this.addTransitiveAndPropagate((ObjcProvider) toAddObject);
}
}
}
}
/**
* Adds the given providers from skylark, but propagate any normally-propagated items
* only to direct dependers. An error is thrown if toAdd is not an iterable of ObjcProvider
* instances.
*/
@SuppressWarnings("unchecked")
void addDirectDepProvidersFromSkylark(Object toAdd) {
if (!(toAdd instanceof Iterable)) {
throw new IllegalArgumentException(
String.format(
AppleSkylarkCommon.BAD_PROVIDERS_ITER_ERROR, EvalUtils.getDataTypeName(toAdd)));
} else {
Iterable<Object> toAddIterable = (Iterable<Object>) toAdd;
for (Object toAddObject : toAddIterable) {
if (!(toAddObject instanceof ObjcProvider)) {
throw new IllegalArgumentException(
String.format(
AppleSkylarkCommon.BAD_PROVIDERS_ELEM_ERROR,
EvalUtils.getDataTypeName(toAddObject)));
} else {
this.addAsDirectDeps((ObjcProvider) toAddObject);
}
}
}
}
@SuppressWarnings({"rawtypes", "unchecked"})
public ObjcProvider build() {
ImmutableMap.Builder<Key<?>, NestedSet<?>> propagatedBuilder = new ImmutableMap.Builder<>();
for (Map.Entry<Key<?>, NestedSetBuilder<?>> typeEntry : items.entrySet()) {
propagatedBuilder.put(typeEntry.getKey(), typeEntry.getValue().build());
}
ImmutableMap.Builder<Key<?>, NestedSet<?>> nonPropagatedBuilder =
new ImmutableMap.Builder<>();
for (Map.Entry<Key<?>, NestedSetBuilder<?>> typeEntry : nonPropagatedItems.entrySet()) {
nonPropagatedBuilder.put(typeEntry.getKey(), typeEntry.getValue().build());
}
ImmutableMap.Builder<Key<?>, NestedSet<?>> strictDependencyBuilder =
new ImmutableMap.Builder<>();
for (Map.Entry<Key<?>, NestedSetBuilder<?>> typeEntry : strictDependencyItems.entrySet()) {
strictDependencyBuilder.put(typeEntry.getKey(), typeEntry.getValue().build());
}
ImmutableMap<Key<?>, NestedSet<?>> propagated = propagatedBuilder.build();
ImmutableMap<Key<?>, NestedSet<?>> nonPropagated = nonPropagatedBuilder.build();
ImmutableMap<Key<?>, NestedSet<?>> strictDependency = strictDependencyBuilder.build();
ImmutableMap.Builder<String, Object> skylarkFields = new ImmutableMap.Builder<>();
for (Key<?> key : KEYS_FOR_SKYLARK) {
if (propagated.containsKey(key) && strictDependency.containsKey(key)) {
NestedSet<?> union = new NestedSetBuilder(STABLE_ORDER)
.addTransitive(propagated.get(key))
.addTransitive(strictDependency.get(key))
.build();
skylarkFields.put(
key.getSkylarkKeyName(), ObjcProviderSkylarkConverters.convertToSkylark(key, union));
} else if (items.containsKey(key)) {
skylarkFields.put(
key.getSkylarkKeyName(),
ObjcProviderSkylarkConverters.convertToSkylark(key, propagated.get(key)));
} else if (strictDependency.containsKey(key)) {
skylarkFields.put(
key.getSkylarkKeyName(),
ObjcProviderSkylarkConverters.convertToSkylark(key, strictDependency.get(key)));
} else {
skylarkFields.put(
key.getSkylarkKeyName(),
ObjcProviderSkylarkConverters.convertToSkylark(
key, new NestedSetBuilder(STABLE_ORDER).build()));
}
}
return new ObjcProvider(propagated, nonPropagated, strictDependency, skylarkFields.build());
}
}
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.io.hfile;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableUtils;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
/**
* Testing writing a version 3 {@link HFile}.
*/
@RunWith(Parameterized.class)
@Category({IOTests.class, SmallTests.class})
public class TestHFileWriterV3 {
  private static final Log LOG = LogFactory.getLog(TestHFileWriterV3.class);
  private static final HBaseTestingUtility TEST_UTIL =
      new HBaseTestingUtility();
  private Configuration conf;
  private FileSystem fs;
  // Whether cells are written with tags; supplied by the Parameterized runner.
  private boolean useTags;
  /**
   * @param useTags whether this parameterized run writes cells with tags (see
   *     {@link #parameters()})
   */
  public TestHFileWriterV3(boolean useTags) {
    this.useTags = useTags;
  }
  /** Runs every test twice: once with tags enabled and once without. */
  @Parameters
  public static Collection<Object[]> parameters() {
    return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED;
  }
  /** Grabs the shared test configuration and a filesystem handle for it. */
  @Before
  public void setUp() throws IOException {
    conf = TEST_UTIL.getConfiguration();
    fs = FileSystem.get(conf);
  }
  /** Writes a v3 HFile and reads it back, verifying trailer, indexes, and every cell. */
  @Test
  public void testHFileFormatV3() throws IOException {
    testHFileFormatV3Internals(useTags);
  }
private void testHFileFormatV3Internals(boolean useTags) throws IOException {
Path hfilePath = new Path(TEST_UTIL.getDataTestDir(), "testHFileFormatV3");
final Compression.Algorithm compressAlgo = Compression.Algorithm.GZ;
final int entryCount = 10000;
writeDataAndReadFromHFile(hfilePath, compressAlgo, entryCount, false, useTags);
}
  /** Writes an HFile large enough to have a midkey and verifies the midkey is computable. */
  @Test
  public void testMidKeyInHFile() throws IOException{
    testMidKeyInHFileInternals(useTags);
  }
private void testMidKeyInHFileInternals(boolean useTags) throws IOException {
Path hfilePath = new Path(TEST_UTIL.getDataTestDir(),
"testMidKeyInHFile");
Compression.Algorithm compressAlgo = Compression.Algorithm.NONE;
int entryCount = 50000;
writeDataAndReadFromHFile(hfilePath, compressAlgo, entryCount, true, useTags);
}
private void writeDataAndReadFromHFile(Path hfilePath,
Algorithm compressAlgo, int entryCount, boolean findMidKey, boolean useTags) throws IOException {
HFileContext context = new HFileContextBuilder()
.withBlockSize(4096)
.withIncludesTags(useTags)
.withCompression(compressAlgo).build();
HFile.Writer writer = new HFile.WriterFactory(conf, new CacheConfig(conf))
.withPath(fs, hfilePath)
.withFileContext(context)
.withComparator(CellComparatorImpl.COMPARATOR)
.create();
Random rand = new Random(9713312); // Just a fixed seed.
List<KeyValue> keyValues = new ArrayList<>(entryCount);
for (int i = 0; i < entryCount; ++i) {
byte[] keyBytes = RandomKeyValueUtil.randomOrderedKey(rand, i);
// A random-length random value.
byte[] valueBytes = RandomKeyValueUtil.randomValue(rand);
KeyValue keyValue = null;
if (useTags) {
ArrayList<Tag> tags = new ArrayList<>();
for (int j = 0; j < 1 + rand.nextInt(4); j++) {
byte[] tagBytes = new byte[16];
rand.nextBytes(tagBytes);
tags.add(new ArrayBackedTag((byte) 1, tagBytes));
}
keyValue = new KeyValue(keyBytes, null, null, HConstants.LATEST_TIMESTAMP,
valueBytes, tags);
} else {
keyValue = new KeyValue(keyBytes, null, null, HConstants.LATEST_TIMESTAMP,
valueBytes);
}
writer.append(keyValue);
keyValues.add(keyValue);
}
// Add in an arbitrary order. They will be sorted lexicographically by
// the key.
writer.appendMetaBlock("CAPITAL_OF_USA", new Text("Washington, D.C."));
writer.appendMetaBlock("CAPITAL_OF_RUSSIA", new Text("Moscow"));
writer.appendMetaBlock("CAPITAL_OF_FRANCE", new Text("Paris"));
writer.close();
FSDataInputStream fsdis = fs.open(hfilePath);
long fileSize = fs.getFileStatus(hfilePath).getLen();
FixedFileTrailer trailer =
FixedFileTrailer.readFromStream(fsdis, fileSize);
assertEquals(3, trailer.getMajorVersion());
assertEquals(entryCount, trailer.getEntryCount());
HFileContext meta = new HFileContextBuilder()
.withCompression(compressAlgo)
.withIncludesMvcc(false)
.withIncludesTags(useTags)
.withHBaseCheckSum(true).build();
HFileBlock.FSReader blockReader =
new HFileBlock.FSReaderImpl(fsdis, fileSize, meta);
// Comparator class name is stored in the trailer in version 3.
CellComparator comparator = trailer.createComparator();
HFileBlockIndex.BlockIndexReader dataBlockIndexReader =
new HFileBlockIndex.CellBasedKeyBlockIndexReader(comparator,
trailer.getNumDataIndexLevels());
HFileBlockIndex.BlockIndexReader metaBlockIndexReader =
new HFileBlockIndex.ByteArrayKeyBlockIndexReader(1);
HFileBlock.BlockIterator blockIter = blockReader.blockRange(
trailer.getLoadOnOpenDataOffset(),
fileSize - trailer.getTrailerSize());
// Data index. We also read statistics about the block index written after
// the root level.
dataBlockIndexReader.readMultiLevelIndexRoot(
blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX), trailer.getDataIndexCount());
if (findMidKey) {
Cell midkey = dataBlockIndexReader.midkey();
assertNotNull("Midkey should not be null", midkey);
}
// Meta index.
metaBlockIndexReader.readRootIndex(
blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX)
.getByteStream(), trailer.getMetaIndexCount());
// File info
FileInfo fileInfo = new FileInfo();
fileInfo.read(blockIter.nextBlockWithBlockType(BlockType.FILE_INFO).getByteStream());
byte [] keyValueFormatVersion = fileInfo.get(HFileWriterImpl.KEY_VALUE_VERSION);
boolean includeMemstoreTS = keyValueFormatVersion != null &&
Bytes.toInt(keyValueFormatVersion) > 0;
// Counters for the number of key/value pairs and the number of blocks
int entriesRead = 0;
int blocksRead = 0;
long memstoreTS = 0;
// Scan blocks the way the reader would scan them
fsdis.seek(0);
long curBlockPos = 0;
while (curBlockPos <= trailer.getLastDataBlockOffset()) {
HFileBlock block = blockReader.readBlockData(curBlockPos, -1, false, false)
.unpack(context, blockReader);
assertEquals(BlockType.DATA, block.getBlockType());
ByteBuff buf = block.getBufferWithoutHeader();
int keyLen = -1;
while (buf.hasRemaining()) {
keyLen = buf.getInt();
int valueLen = buf.getInt();
byte[] key = new byte[keyLen];
buf.get(key);
byte[] value = new byte[valueLen];
buf.get(value);
byte[] tagValue = null;
if (useTags) {
int tagLen = ((buf.get() & 0xff) << 8) ^ (buf.get() & 0xff);
tagValue = new byte[tagLen];
buf.get(tagValue);
}
if (includeMemstoreTS) {
ByteArrayInputStream byte_input = new ByteArrayInputStream(buf.array(), buf.arrayOffset()
+ buf.position(), buf.remaining());
DataInputStream data_input = new DataInputStream(byte_input);
memstoreTS = WritableUtils.readVLong(data_input);
buf.position(buf.position() + WritableUtils.getVIntSize(memstoreTS));
}
// A brute-force check to see that all keys and values are correct.
KeyValue kv = keyValues.get(entriesRead);
assertTrue(Bytes.compareTo(key, kv.getKey()) == 0);
assertTrue(Bytes.compareTo(value, 0, value.length, kv.getValueArray(), kv.getValueOffset(),
kv.getValueLength()) == 0);
if (useTags) {
assertNotNull(tagValue);
KeyValue tkv = kv;
assertEquals(tagValue.length, tkv.getTagsLength());
assertTrue(Bytes.compareTo(tagValue, 0, tagValue.length, tkv.getTagsArray(),
tkv.getTagsOffset(), tkv.getTagsLength()) == 0);
}
++entriesRead;
}
++blocksRead;
curBlockPos += block.getOnDiskSizeWithHeader();
}
LOG.info("Finished reading: entries=" + entriesRead + ", blocksRead="
+ blocksRead);
assertEquals(entryCount, entriesRead);
// Meta blocks. We can scan until the load-on-open data offset (which is
// the root block index offset in version 2) because we are not testing
// intermediate-level index blocks here.
int metaCounter = 0;
while (fsdis.getPos() < trailer.getLoadOnOpenDataOffset()) {
LOG.info("Current offset: " + fsdis.getPos() + ", scanning until " +
trailer.getLoadOnOpenDataOffset());
HFileBlock block = blockReader.readBlockData(curBlockPos, -1, false, false)
.unpack(context, blockReader);
assertEquals(BlockType.META, block.getBlockType());
Text t = new Text();
ByteBuff buf = block.getBufferWithoutHeader();
if (Writables.getWritable(buf.array(), buf.arrayOffset(), buf.limit(), t) == null) {
throw new IOException("Failed to deserialize block " + this +
" into a " + t.getClass().getSimpleName());
}
Text expectedText =
(metaCounter == 0 ? new Text("Paris") : metaCounter == 1 ? new Text(
"Moscow") : new Text("Washington, D.C."));
assertEquals(expectedText, t);
LOG.info("Read meta block data: " + t);
++metaCounter;
curBlockPos += block.getOnDiskSizeWithHeader();
}
fsdis.close();
}
}
| |
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1;
import com.google.api.core.BetaApi;
import com.google.api.gax.httpjson.ApiMessage;
import java.util.List;
import java.util.Objects;
import javax.annotation.Generated;
import javax.annotation.Nullable;
@Generated("by GAPIC")
@BetaApi
/**
* Request object for method compute.regionInstanceGroupManagers.listManagedInstances. Lists the
* instances in the managed instance group and instances that are scheduled to be created. The list
* includes any current actions that the group has scheduled for its instances.
*/
public final class ListManagedInstancesRegionInstanceGroupManagersHttpRequest
    implements ApiMessage {
  // Request/query parameters. All are nullable; only instanceGroupManager is
  // required (enforced in Builder.build()).
  private final String access_token;
  private final String callback;
  private final String fields;
  private final String filter;
  private final String instanceGroupManager;
  private final String key;
  private final Integer maxResults;
  private final String orderBy;
  private final String pageToken;
  private final String prettyPrint;
  private final String quotaUser;
  private final String userIp;

  /** All-null constructor; used only to create {@code DEFAULT_INSTANCE}. */
  private ListManagedInstancesRegionInstanceGroupManagersHttpRequest() {
    this.access_token = null;
    this.callback = null;
    this.fields = null;
    this.filter = null;
    this.instanceGroupManager = null;
    this.key = null;
    this.maxResults = null;
    this.orderBy = null;
    this.pageToken = null;
    this.prettyPrint = null;
    this.quotaUser = null;
    this.userIp = null;
  }

  /** Full constructor; invoked exclusively by {@link Builder#build()}. */
  private ListManagedInstancesRegionInstanceGroupManagersHttpRequest(
      String access_token,
      String callback,
      String fields,
      String filter,
      String instanceGroupManager,
      String key,
      Integer maxResults,
      String orderBy,
      String pageToken,
      String prettyPrint,
      String quotaUser,
      String userIp) {
    this.access_token = access_token;
    this.callback = callback;
    this.fields = fields;
    this.filter = filter;
    this.instanceGroupManager = instanceGroupManager;
    this.key = key;
    this.maxResults = maxResults;
    this.orderBy = orderBy;
    this.pageToken = pageToken;
    this.prettyPrint = prettyPrint;
    this.quotaUser = quotaUser;
    this.userIp = userIp;
  }

  /**
   * Returns the value of the request field with the given wire name, or {@code null} if the name
   * is not a field of this request.
   */
  @Override
  public Object getFieldValue(String fieldName) {
    if ("access_token".equals(fieldName)) {
      return access_token;
    }
    if ("callback".equals(fieldName)) {
      return callback;
    }
    if ("fields".equals(fieldName)) {
      return fields;
    }
    if ("filter".equals(fieldName)) {
      return filter;
    }
    if ("instanceGroupManager".equals(fieldName)) {
      return instanceGroupManager;
    }
    if ("key".equals(fieldName)) {
      return key;
    }
    if ("maxResults".equals(fieldName)) {
      return maxResults;
    }
    if ("orderBy".equals(fieldName)) {
      return orderBy;
    }
    if ("pageToken".equals(fieldName)) {
      return pageToken;
    }
    if ("prettyPrint".equals(fieldName)) {
      return prettyPrint;
    }
    if ("quotaUser".equals(fieldName)) {
      return quotaUser;
    }
    if ("userIp".equals(fieldName)) {
      return userIp;
    }
    return null;
  }

  /** Always {@code null}: this request type carries no HTTP request body. */
  @Nullable
  @Override
  public ApiMessage getApiMessageRequestBody() {
    return null;
  }

  @Nullable
  @Override
  /**
   * The fields that should be serialized (even if they have empty values). If the containing
   * message object has a non-null fieldmask, then all the fields in the field mask (and only those
   * fields in the field mask) will be serialized. If the containing object does not have a
   * fieldmask, then only non-empty fields will be serialized.
   */
  public List<String> getFieldMask() {
    return null;
  }

  /** OAuth 2.0 token for the current user. */
  public String getAccessToken() {
    return access_token;
  }

  /** Name of the JavaScript callback function that handles the response. */
  public String getCallback() {
    return callback;
  }

  /** Selector specifying a subset of fields to include in the response. */
  public String getFields() {
    return fields;
  }

  /**
   * A filter expression that filters resources listed in the response. The expression must specify
   * the field name, a comparison operator, and the value that you want to use for filtering. The
   * value must be a string, a number, or a boolean. The comparison operator must be either =, !=,
   * &gt;, or &lt;.
   *
   * <p>For example, if you are filtering Compute Engine instances, you can exclude instances named
   * example-instance by specifying name != example-instance.
   *
   * <p>You can also filter nested fields. For example, you could specify
   * scheduling.automaticRestart = false to include instances only if they are not scheduled for
   * automatic restarts. You can use filtering on nested fields to filter based on resource labels.
   *
   * <p>To filter on multiple expressions, provide each separate expression within parentheses. For
   * example, (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake"). By default, each
   * expression is an AND expression. However, you can include AND and OR expressions explicitly.
   * For example, (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND
   * (scheduling.automaticRestart = true).
   */
  public String getFilter() {
    return filter;
  }

  /**
   * The name of the managed instance group. It must have the format
   * `{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/listManagedInstances`.
   * \`{instanceGroupManager}\` must start with a letter, and contain only letters (\`[A-Za-z]\`),
   * numbers (\`[0-9]\`), dashes (\`-\`), &#42; underscores (\`_\`), periods (\`.\`), tildes
   * (\`~\`), plus (\`+\`) or percent &#42; signs (\`%\`). It must be between 3 and 255 characters
   * in length, and it &#42; must not start with \`"goog"\`.
   */
  public String getInstanceGroupManager() {
    return instanceGroupManager;
  }

  /** API key. Required unless you provide an OAuth 2.0 token. */
  public String getKey() {
    return key;
  }

  /**
   * The maximum number of results per page that should be returned. If the number of available
   * results is larger than maxResults, Compute Engine returns a nextPageToken that can be used to
   * get the next page of results in subsequent list requests. Acceptable values are 0 to 500,
   * inclusive. (Default: 500)
   */
  public Integer getMaxResults() {
    return maxResults;
  }

  /**
   * Sorts list results by a certain order. By default, results are returned in alphanumerical order
   * based on the resource name.
   *
   * <p>You can also sort results in descending order based on the creation timestamp using
   * orderBy="creationTimestamp desc". This sorts results based on the creationTimestamp field in
   * reverse chronological order (newest result first). Use this to sort resources like operations
   * so that the newest operation is returned first.
   *
   * <p>Currently, only sorting by name or creationTimestamp desc is supported.
   */
  public String getOrderBy() {
    return orderBy;
  }

  /**
   * Specifies a page token to use. Set pageToken to the nextPageToken returned by a previous list
   * request to get the next page of results.
   */
  public String getPageToken() {
    return pageToken;
  }

  /** Returns response with indentations and line breaks. */
  public String getPrettyPrint() {
    return prettyPrint;
  }

  /** Alternative to userIp. */
  public String getQuotaUser() {
    return quotaUser;
  }

  /** IP address of the end user for whom the API call is being made. */
  public String getUserIp() {
    return userIp;
  }

  /** Returns a new builder seeded from the (all-null) default instance. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Returns a new builder pre-populated with the fields of {@code prototype}. */
  public static Builder newBuilder(
      ListManagedInstancesRegionInstanceGroupManagersHttpRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  /** Returns a builder carrying this instance's fields (empty for the default instance). */
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  /** Returns the shared immutable all-null instance. */
  public static ListManagedInstancesRegionInstanceGroupManagersHttpRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared all-null instance; safe to share because the class is immutable.
  private static final ListManagedInstancesRegionInstanceGroupManagersHttpRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new ListManagedInstancesRegionInstanceGroupManagersHttpRequest();
  }

  /** Mutable builder for {@link ListManagedInstancesRegionInstanceGroupManagersHttpRequest}. */
  public static class Builder {
    private String access_token;
    private String callback;
    private String fields;
    private String filter;
    private String instanceGroupManager;
    private String key;
    private Integer maxResults;
    private String orderBy;
    private String pageToken;
    private String prettyPrint;
    private String quotaUser;
    private String userIp;

    Builder() {}

    /** Copies every non-null field of {@code other} into this builder and returns this. */
    public Builder mergeFrom(ListManagedInstancesRegionInstanceGroupManagersHttpRequest other) {
      if (other == ListManagedInstancesRegionInstanceGroupManagersHttpRequest.getDefaultInstance())
        return this;
      if (other.getAccessToken() != null) {
        this.access_token = other.access_token;
      }
      if (other.getCallback() != null) {
        this.callback = other.callback;
      }
      if (other.getFields() != null) {
        this.fields = other.fields;
      }
      if (other.getFilter() != null) {
        this.filter = other.filter;
      }
      if (other.getInstanceGroupManager() != null) {
        this.instanceGroupManager = other.instanceGroupManager;
      }
      if (other.getKey() != null) {
        this.key = other.key;
      }
      if (other.getMaxResults() != null) {
        this.maxResults = other.maxResults;
      }
      if (other.getOrderBy() != null) {
        this.orderBy = other.orderBy;
      }
      if (other.getPageToken() != null) {
        this.pageToken = other.pageToken;
      }
      if (other.getPrettyPrint() != null) {
        this.prettyPrint = other.prettyPrint;
      }
      if (other.getQuotaUser() != null) {
        this.quotaUser = other.quotaUser;
      }
      if (other.getUserIp() != null) {
        this.userIp = other.userIp;
      }
      return this;
    }

    // Copy constructor; copies all fields (including nulls) from the source request.
    Builder(ListManagedInstancesRegionInstanceGroupManagersHttpRequest source) {
      this.access_token = source.access_token;
      this.callback = source.callback;
      this.fields = source.fields;
      this.filter = source.filter;
      this.instanceGroupManager = source.instanceGroupManager;
      this.key = source.key;
      this.maxResults = source.maxResults;
      this.orderBy = source.orderBy;
      this.pageToken = source.pageToken;
      this.prettyPrint = source.prettyPrint;
      this.quotaUser = source.quotaUser;
      this.userIp = source.userIp;
    }

    /** OAuth 2.0 token for the current user. */
    public String getAccessToken() {
      return access_token;
    }

    /** OAuth 2.0 token for the current user. */
    public Builder setAccessToken(String access_token) {
      this.access_token = access_token;
      return this;
    }

    /** Name of the JavaScript callback function that handles the response. */
    public String getCallback() {
      return callback;
    }

    /** Name of the JavaScript callback function that handles the response. */
    public Builder setCallback(String callback) {
      this.callback = callback;
      return this;
    }

    /** Selector specifying a subset of fields to include in the response. */
    public String getFields() {
      return fields;
    }

    /** Selector specifying a subset of fields to include in the response. */
    public Builder setFields(String fields) {
      this.fields = fields;
      return this;
    }

    /**
     * A filter expression that filters resources listed in the response. The expression must
     * specify the field name, a comparison operator, and the value that you want to use for
     * filtering. The value must be a string, a number, or a boolean. The comparison operator must
     * be either =, !=, &gt;, or &lt;.
     *
     * <p>For example, if you are filtering Compute Engine instances, you can exclude instances
     * named example-instance by specifying name != example-instance.
     *
     * <p>You can also filter nested fields. For example, you could specify
     * scheduling.automaticRestart = false to include instances only if they are not scheduled for
     * automatic restarts. You can use filtering on nested fields to filter based on resource
     * labels.
     *
     * <p>To filter on multiple expressions, provide each separate expression within parentheses.
     * For example, (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake"). By
     * default, each expression is an AND expression. However, you can include AND and OR
     * expressions explicitly. For example, (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel
     * Broadwell") AND (scheduling.automaticRestart = true).
     */
    public String getFilter() {
      return filter;
    }

    /**
     * A filter expression that filters resources listed in the response. The expression must
     * specify the field name, a comparison operator, and the value that you want to use for
     * filtering. The value must be a string, a number, or a boolean. The comparison operator must
     * be either =, !=, &gt;, or &lt;.
     *
     * <p>For example, if you are filtering Compute Engine instances, you can exclude instances
     * named example-instance by specifying name != example-instance.
     *
     * <p>You can also filter nested fields. For example, you could specify
     * scheduling.automaticRestart = false to include instances only if they are not scheduled for
     * automatic restarts. You can use filtering on nested fields to filter based on resource
     * labels.
     *
     * <p>To filter on multiple expressions, provide each separate expression within parentheses.
     * For example, (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake"). By
     * default, each expression is an AND expression. However, you can include AND and OR
     * expressions explicitly. For example, (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel
     * Broadwell") AND (scheduling.automaticRestart = true).
     */
    public Builder setFilter(String filter) {
      this.filter = filter;
      return this;
    }

    /**
     * The name of the managed instance group. It must have the format
     * `{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/listManagedInstances`.
     * \`{instanceGroupManager}\` must start with a letter, and contain only letters (\`[A-Za-z]\`),
     * numbers (\`[0-9]\`), dashes (\`-\`), &#42; underscores (\`_\`), periods (\`.\`), tildes
     * (\`~\`), plus (\`+\`) or percent &#42; signs (\`%\`). It must be between 3 and 255 characters
     * in length, and it &#42; must not start with \`"goog"\`.
     */
    public String getInstanceGroupManager() {
      return instanceGroupManager;
    }

    /**
     * The name of the managed instance group. It must have the format
     * `{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/listManagedInstances`.
     * \`{instanceGroupManager}\` must start with a letter, and contain only letters (\`[A-Za-z]\`),
     * numbers (\`[0-9]\`), dashes (\`-\`), &#42; underscores (\`_\`), periods (\`.\`), tildes
     * (\`~\`), plus (\`+\`) or percent &#42; signs (\`%\`). It must be between 3 and 255 characters
     * in length, and it &#42; must not start with \`"goog"\`.
     */
    public Builder setInstanceGroupManager(String instanceGroupManager) {
      this.instanceGroupManager = instanceGroupManager;
      return this;
    }

    /** API key. Required unless you provide an OAuth 2.0 token. */
    public String getKey() {
      return key;
    }

    /** API key. Required unless you provide an OAuth 2.0 token. */
    public Builder setKey(String key) {
      this.key = key;
      return this;
    }

    /**
     * The maximum number of results per page that should be returned. If the number of available
     * results is larger than maxResults, Compute Engine returns a nextPageToken that can be used to
     * get the next page of results in subsequent list requests. Acceptable values are 0 to 500,
     * inclusive. (Default: 500)
     */
    public Integer getMaxResults() {
      return maxResults;
    }

    /**
     * The maximum number of results per page that should be returned. If the number of available
     * results is larger than maxResults, Compute Engine returns a nextPageToken that can be used to
     * get the next page of results in subsequent list requests. Acceptable values are 0 to 500,
     * inclusive. (Default: 500)
     */
    public Builder setMaxResults(Integer maxResults) {
      this.maxResults = maxResults;
      return this;
    }

    /**
     * Sorts list results by a certain order. By default, results are returned in alphanumerical
     * order based on the resource name.
     *
     * <p>You can also sort results in descending order based on the creation timestamp using
     * orderBy="creationTimestamp desc". This sorts results based on the creationTimestamp field in
     * reverse chronological order (newest result first). Use this to sort resources like operations
     * so that the newest operation is returned first.
     *
     * <p>Currently, only sorting by name or creationTimestamp desc is supported.
     */
    public String getOrderBy() {
      return orderBy;
    }

    /**
     * Sorts list results by a certain order. By default, results are returned in alphanumerical
     * order based on the resource name.
     *
     * <p>You can also sort results in descending order based on the creation timestamp using
     * orderBy="creationTimestamp desc". This sorts results based on the creationTimestamp field in
     * reverse chronological order (newest result first). Use this to sort resources like operations
     * so that the newest operation is returned first.
     *
     * <p>Currently, only sorting by name or creationTimestamp desc is supported.
     */
    public Builder setOrderBy(String orderBy) {
      this.orderBy = orderBy;
      return this;
    }

    /**
     * Specifies a page token to use. Set pageToken to the nextPageToken returned by a previous list
     * request to get the next page of results.
     */
    public String getPageToken() {
      return pageToken;
    }

    /**
     * Specifies a page token to use. Set pageToken to the nextPageToken returned by a previous list
     * request to get the next page of results.
     */
    public Builder setPageToken(String pageToken) {
      this.pageToken = pageToken;
      return this;
    }

    /** Returns response with indentations and line breaks. */
    public String getPrettyPrint() {
      return prettyPrint;
    }

    /** Returns response with indentations and line breaks. */
    public Builder setPrettyPrint(String prettyPrint) {
      this.prettyPrint = prettyPrint;
      return this;
    }

    /** Alternative to userIp. */
    public String getQuotaUser() {
      return quotaUser;
    }

    /** Alternative to userIp. */
    public Builder setQuotaUser(String quotaUser) {
      this.quotaUser = quotaUser;
      return this;
    }

    /** IP address of the end user for whom the API call is being made. */
    public String getUserIp() {
      return userIp;
    }

    /** IP address of the end user for whom the API call is being made. */
    public Builder setUserIp(String userIp) {
      this.userIp = userIp;
      return this;
    }

    /**
     * Builds the immutable request.
     *
     * @throws IllegalStateException if the required field instanceGroupManager is unset
     */
    public ListManagedInstancesRegionInstanceGroupManagersHttpRequest build() {
      String missing = "";
      if (instanceGroupManager == null) {
        missing += " instanceGroupManager";
      }
      if (!missing.isEmpty()) {
        throw new IllegalStateException("Missing required properties:" + missing);
      }
      return new ListManagedInstancesRegionInstanceGroupManagersHttpRequest(
          access_token,
          callback,
          fields,
          filter,
          instanceGroupManager,
          key,
          maxResults,
          orderBy,
          pageToken,
          prettyPrint,
          quotaUser,
          userIp);
    }

    /** Returns an independent copy of this builder (does not call Object.clone()). */
    public Builder clone() {
      Builder newBuilder = new Builder();
      newBuilder.setAccessToken(this.access_token);
      newBuilder.setCallback(this.callback);
      newBuilder.setFields(this.fields);
      newBuilder.setFilter(this.filter);
      newBuilder.setInstanceGroupManager(this.instanceGroupManager);
      newBuilder.setKey(this.key);
      newBuilder.setMaxResults(this.maxResults);
      newBuilder.setOrderBy(this.orderBy);
      newBuilder.setPageToken(this.pageToken);
      newBuilder.setPrettyPrint(this.prettyPrint);
      newBuilder.setQuotaUser(this.quotaUser);
      newBuilder.setUserIp(this.userIp);
      return newBuilder;
    }
  }

  // Debug representation listing every field, including nulls.
  @Override
  public String toString() {
    return "ListManagedInstancesRegionInstanceGroupManagersHttpRequest{"
        + "access_token="
        + access_token
        + ", "
        + "callback="
        + callback
        + ", "
        + "fields="
        + fields
        + ", "
        + "filter="
        + filter
        + ", "
        + "instanceGroupManager="
        + instanceGroupManager
        + ", "
        + "key="
        + key
        + ", "
        + "maxResults="
        + maxResults
        + ", "
        + "orderBy="
        + orderBy
        + ", "
        + "pageToken="
        + pageToken
        + ", "
        + "prettyPrint="
        + prettyPrint
        + ", "
        + "quotaUser="
        + quotaUser
        + ", "
        + "userIp="
        + userIp
        + "}";
  }

  // Value equality over all request fields (null-safe via Objects.equals).
  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o instanceof ListManagedInstancesRegionInstanceGroupManagersHttpRequest) {
      ListManagedInstancesRegionInstanceGroupManagersHttpRequest that =
          (ListManagedInstancesRegionInstanceGroupManagersHttpRequest) o;
      return Objects.equals(this.access_token, that.getAccessToken())
          && Objects.equals(this.callback, that.getCallback())
          && Objects.equals(this.fields, that.getFields())
          && Objects.equals(this.filter, that.getFilter())
          && Objects.equals(this.instanceGroupManager, that.getInstanceGroupManager())
          && Objects.equals(this.key, that.getKey())
          && Objects.equals(this.maxResults, that.getMaxResults())
          && Objects.equals(this.orderBy, that.getOrderBy())
          && Objects.equals(this.pageToken, that.getPageToken())
          && Objects.equals(this.prettyPrint, that.getPrettyPrint())
          && Objects.equals(this.quotaUser, that.getQuotaUser())
          && Objects.equals(this.userIp, that.getUserIp());
    }
    return false;
  }

  // Hash code consistent with equals(Object): same fields, same order.
  @Override
  public int hashCode() {
    return Objects.hash(
        access_token,
        callback,
        fields,
        filter,
        instanceGroupManager,
        key,
        maxResults,
        orderBy,
        pageToken,
        prettyPrint,
        quotaUser,
        userIp);
  }
}
| |
/*
* Copyright (c) 2021, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.apimgt.common.gateway.jwtgenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.nimbusds.jwt.JWTClaimsSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.apimgt.common.gateway.constants.JWTConstants;
import org.wso2.carbon.apimgt.common.gateway.dto.JWTConfigurationDto;
import org.wso2.carbon.apimgt.common.gateway.dto.JWTInfoDto;
import org.wso2.carbon.apimgt.common.gateway.exception.JWTGeneratorException;
import org.wso2.carbon.apimgt.common.gateway.util.JWTUtil;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.security.PrivateKey;
import java.security.cert.Certificate;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.UUID;
/**
* Abstract class for jwt generation.
*/
public abstract class AbstractAPIMgtGatewayJWTGenerator {
    private static final Log log = LogFactory.getLog(AbstractAPIMgtGatewayJWTGenerator.class);
    public static final String NONE = "NONE";
    public static final String SHA256_WITH_RSA = "SHA256withRSA";
    public static final String API_GATEWAY_ID = "wso2.org/products/am";
    public JWTConfigurationDto jwtConfigurationDto;
    private static volatile long ttl = -1L;
    public String dialectURI;
    public String signatureAlgorithm;

    public AbstractAPIMgtGatewayJWTGenerator() {
    }

    /**
     * Applies the JWT configuration: resolves the claim dialect URI (defaulting to
     * "http://wso2.org/claims") and the signature algorithm. Any algorithm other than
     * NONE or SHA256withRSA falls back to SHA256withRSA.
     *
     * @param jwtConfigurationDto configuration to apply
     */
    public void setJWTConfigurationDto(JWTConfigurationDto jwtConfigurationDto) {
        this.jwtConfigurationDto = jwtConfigurationDto;
        dialectURI = jwtConfigurationDto.getConsumerDialectUri();
        if (dialectURI == null) {
            dialectURI = "http://wso2.org/claims";
        }
        signatureAlgorithm = jwtConfigurationDto.getSignatureAlgorithm();
        if (signatureAlgorithm == null || !(NONE.equals(signatureAlgorithm)
                || SHA256_WITH_RSA.equals(signatureAlgorithm))) {
            signatureAlgorithm = SHA256_WITH_RSA;
        }
    }

    /**
     * Generates the backend JWT as "header.body.signature" (signature part is empty when the
     * configured algorithm is NONE).
     *
     * @param jwtInfoDto claim source for the token body
     * @return the serialized JWT
     * @throws JWTGeneratorException if header construction or signing fails
     */
    public String generateToken(JWTInfoDto jwtInfoDto) throws JWTGeneratorException {
        String jwtHeader = buildHeader();
        String jwtBody = buildBody(jwtInfoDto);
        String base64UrlEncodedHeader = "";
        if (jwtHeader != null) {
            base64UrlEncodedHeader = encode(jwtHeader.getBytes(StandardCharsets.UTF_8));
        }
        // Using StandardCharsets.UTF_8 instead of getBytes("UTF-8") removes the checked
        // UnsupportedEncodingException, which the previous code swallowed after a debug log,
        // silently producing a token with an empty body.
        String base64UrlEncodedBody = encode(jwtBody.getBytes(StandardCharsets.UTF_8));
        if (SHA256_WITH_RSA.equals(signatureAlgorithm)) {
            String assertion = base64UrlEncodedHeader + '.' + base64UrlEncodedBody;
            //get the assertion signed
            byte[] signedAssertion = signJWT(assertion);
            if (log.isDebugEnabled()) {
                log.debug("signed assertion value : " + new String(signedAssertion, Charset.defaultCharset()));
            }
            String base64UrlEncodedAssertion = encode(signedAssertion);
            return base64UrlEncodedHeader + '.' + base64UrlEncodedBody + '.' + base64UrlEncodedAssertion;
        } else {
            // Unsigned token: trailing '.' with an empty signature segment.
            return base64UrlEncodedHeader + '.' + base64UrlEncodedBody + '.';
        }
    }

    /**
     * Builds the JOSE header JSON for the configured algorithm: a plain
     * {"typ":"JWT","alg":...} object for NONE, or a certificate-bearing header for
     * SHA256withRSA. Returns null for any other algorithm value.
     *
     * @throws JWTGeneratorException if the certificate header cannot be built
     */
    public String buildHeader() throws JWTGeneratorException {
        String jwtHeader = null;
        if (NONE.equals(signatureAlgorithm)) {
            StringBuilder jwtHeaderBuilder = new StringBuilder();
            jwtHeaderBuilder.append("{\"typ\":\"JWT\",");
            jwtHeaderBuilder.append("\"alg\":\"");
            jwtHeaderBuilder.append(JWTUtil.getJWSCompliantAlgorithmCode(NONE));
            jwtHeaderBuilder.append('\"');
            jwtHeaderBuilder.append('}');
            jwtHeader = jwtHeaderBuilder.toString();
        } else if (SHA256_WITH_RSA.equals(signatureAlgorithm)) {
            jwtHeader = addCertToHeader();
        }
        return jwtHeader;
    }

    /**
     * Signs the given "header.body" assertion with the configured private key.
     *
     * @param assertion the base64url-encoded header and body joined by '.'
     * @return the raw signature bytes
     * @throws JWTGeneratorException if signing fails for any reason
     */
    public byte[] signJWT(String assertion) throws JWTGeneratorException {
        try {
            PrivateKey privateKey = jwtConfigurationDto.getPrivateKey();
            return JWTUtil.signJwt(assertion, privateKey, signatureAlgorithm);
        } catch (Exception e) {
            throw new JWTGeneratorException(e);
        }
    }

    /**
     * Helper method to add public certificate to JWT_HEADER to signature verification.
     *
     * @throws JWTGeneratorException if the public certificate cannot be obtained
     */
    protected String addCertToHeader() throws JWTGeneratorException {
        try {
            Certificate publicCert = jwtConfigurationDto.getPublicCert();
            return JWTUtil.generateHeader(publicCert, signatureAlgorithm);
        } catch (Exception e) {
            String error = "Error in obtaining keystore";
            throw new JWTGeneratorException(error, e);
        }
    }

    /**
     * Builds the JWT claim-set JSON. Standard claims win over custom claims with the same key;
     * string values that look like JSON objects or arrays are parsed into maps/lists, and the
     * expiry/issued-time claims are converted from epoch-millis strings to Date.
     *
     * @param jwtInfoDto claim source
     * @return the claim set serialized as a JSON string
     */
    public String buildBody(JWTInfoDto jwtInfoDto) {
        JWTClaimsSet.Builder jwtClaimSetBuilder = new JWTClaimsSet.Builder();
        Map<String, Object> claims = populateStandardClaims(jwtInfoDto);
        Map<String, Object> customClaims = populateCustomClaims(jwtInfoDto);
        // Standard claims take precedence; custom claims fill in only missing keys.
        for (Map.Entry<String, Object> claimEntry : customClaims.entrySet()) {
            if (!claims.containsKey(claimEntry.getKey())) {
                claims.put(claimEntry.getKey(), claimEntry.getValue());
            } else {
                if (log.isDebugEnabled()) {
                    log.debug("Claim key " + claimEntry.getKey() + " already exist");
                }
            }
        }
        ObjectMapper mapper = new ObjectMapper();
        for (Map.Entry<String, Object> claimEntry : claims.entrySet()) {
            Object claimVal = claimEntry.getValue();
            // Bug fix: inspect the VALUE, not Map.Entry.toString() ("key=value"), which
            // previously triggered a bogus JSON parse whenever the claim KEY contained '{'.
            if (claimVal instanceof String && claimVal.toString().contains("{")) {
                try {
                    Map<String, String> map = mapper.readValue(claimVal.toString(), Map.class);
                    jwtClaimSetBuilder.claim(claimEntry.getKey(), map);
                } catch (IOException e) {
                    // Exception isn't thrown in order to generate jwt without claim, even if an error is
                    // occurred during the retrieving claims.
                    log.error(String.format("Error while reading claim values for %s", claimVal), e);
                }
            } else if (claimVal instanceof String && claimVal.toString().contains("[\"")
                    && claimVal.toString().contains("\"]")) {
                try {
                    List<String> arrayList = mapper.readValue(claimVal.toString(), List.class);
                    jwtClaimSetBuilder.claim(claimEntry.getKey(), arrayList);
                } catch (IOException e) {
                    // Exception isn't thrown in order to generate jwt without claim, even if an error is
                    // occurred during the retrieving claims.
                    log.error("Error while reading claim values", e);
                }
            } else if (JWTConstants.EXPIRY_TIME.equals(claimEntry.getKey())) {
                jwtClaimSetBuilder.claim(claimEntry.getKey(), new Date(Long.parseLong((String) claimEntry.getValue())));
            } else if (JWTConstants.ISSUED_TIME.equals(claimEntry.getKey())) {
                jwtClaimSetBuilder.claim(claimEntry.getKey(), new Date(Long.parseLong((String) claimEntry.getValue())));
            } else {
                jwtClaimSetBuilder.claim(claimEntry.getKey(), claimEntry.getValue());
            }
        }
        //Adding JWT standard claim
        jwtClaimSetBuilder.jwtID(UUID.randomUUID().toString());
        JWTClaimsSet jwtClaimsSet = jwtClaimSetBuilder.build();
        return jwtClaimsSet.toJSONObject().toString();
    }

    /**
     * Base64url-encodes the given bytes.
     *
     * <p>NOTE(review): this encoder emits '=' padding; RFC 7515 base64url omits padding.
     * Left unchanged because existing token consumers may depend on the current output —
     * confirm before switching to {@code getUrlEncoder().withoutPadding()}.
     */
    public String encode(byte[] stringToBeEncoded) throws JWTGeneratorException {
        return java.util.Base64.getUrlEncoder().encodeToString(stringToBeEncoded);
    }

    /** Returns the claim dialect URI resolved in {@link #setJWTConfigurationDto}. */
    public String getDialectURI() {
        return dialectURI;
    }

    /** Populates the standard (gateway-defined) claims for the token body. */
    public abstract Map<String, Object> populateStandardClaims(JWTInfoDto jwtInfoDto);

    /** Populates deployment-specific custom claims for the token body. */
    public abstract Map<String, Object> populateCustomClaims(JWTInfoDto jwtInfoDto);
}
| |
/*
* android-plugin-client-sdk-for-locale https://github.com/twofortyfouram/android-plugin-client-sdk-for-locale
* Copyright 2014 two forty four a.m. LLC
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twofortyfouram.locale.sdk.client.ui.activity;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.os.Looper;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.test.ActivityInstrumentationTestCase2;
import android.test.UiThreadTest;
import android.test.suitebuilder.annotation.MediumTest;
import android.test.suitebuilder.annotation.SmallTest;
import com.twofortyfouram.assertion.Assertions;
import com.twofortyfouram.assertion.BundleAssertions;
import com.twofortyfouram.locale.sdk.client.test.condition.ui.activity.FragmentPluginActivityImpl;
import com.twofortyfouram.locale.sdk.client.test.condition.ui.activity.PluginBundleValues;
import com.twofortyfouram.spackle.bundle.BundleComparer;
import com.twofortyfouram.test.ui.activity.ActivityTestUtil;
import net.jcip.annotations.ThreadSafe;
/**
 * Tests {@link FragmentPluginActivityImpl}, verifying the Activity result
 * contract (save versus cancel) for both plug-in conditions and settings.
 */
public final class FragmentPluginActivityImplTest extends
        ActivityInstrumentationTestCase2<FragmentPluginActivityImpl> {

    public FragmentPluginActivityImplTest() {
        super(FragmentPluginActivityImpl.class);
    }

    /**
     * Setup that executes before every test case.
     *
     * The Activity's start Intent is selected from the test method name
     * because setActivityIntent() has no effect inside a method annotated
     * with @UiThreadTest, so it must happen here.
     */
    @Override
    protected void setUp() throws Exception {
        super.setUp();

        final String testCaseName = getName();
        if ("testNewCondition_cancel_because_null_bundle".equals(testCaseName)) { //$NON-NLS-1$
            setActivityIntent(getDefaultStartIntent(PluginType.CONDITION));
        } else if ("testNewSetting_cancel_because_null_bundle".equals(testCaseName)) { //$NON-NLS-1$
            setActivityIntent(getDefaultStartIntent(PluginType.SETTING));
        } else if ("testNewCondition_save".equals(testCaseName)) { //$NON-NLS-1$
            setActivityIntent(getDefaultStartIntent(PluginType.CONDITION));
        } else if ("testNewSetting_save".equals(testCaseName)) { //$NON-NLS-1$
            setActivityIntent(getDefaultStartIntent(PluginType.SETTING));
        } else if ("testOldCondition_save_bundle_and_blurb_changed".equals(testCaseName)) { //$NON-NLS-1$
            setActivityIntent(getOldStartIntent(PluginType.CONDITION, false));
        } else if ("testOldCondition_save_bundle_changed".equals(testCaseName)) { //$NON-NLS-1$
            setActivityIntent(getOldStartIntent(PluginType.CONDITION, false));
        } else if ("testOldCondition_save_blurb_changed".equals(testCaseName)) { //$NON-NLS-1$
            setActivityIntent(getOldStartIntent(PluginType.CONDITION, false));
        } else if ("testOldSetting_save_bundle_and_blurb_changed".equals(testCaseName)) { //$NON-NLS-1$
            setActivityIntent(getOldStartIntent(PluginType.SETTING, false));
        } else if ("testOldSetting_save_bundle_changed".equals(testCaseName)) { //$NON-NLS-1$
            setActivityIntent(getOldStartIntent(PluginType.SETTING, false));
        } else if ("testOldSetting_save_blurb_changed".equals(testCaseName)) { //$NON-NLS-1$
            setActivityIntent(getOldStartIntent(PluginType.SETTING, false));
        } else if ("testOldCondition_diffing_cancel".equals(testCaseName)) { //$NON-NLS-1$
            setActivityIntent(getOldStartIntent(PluginType.CONDITION, false));
        } else if ("testOldSetting_diffing_cancel".equals(testCaseName)) { //$NON-NLS-1$
            setActivityIntent(getOldStartIntent(PluginType.SETTING, false));
        } else if ("testOldCondition_bad_bundle".equals(testCaseName)) { //$NON-NLS-1$
            // FIX: this branch previously built a SETTING intent (copy-paste
            // error); the test under this name exercises a condition.
            setActivityIntent(getOldStartIntent(PluginType.CONDITION, true));
        } else if ("testOldSetting_bad_bundle".equals(testCaseName)) { //$NON-NLS-1$
            setActivityIntent(getOldStartIntent(PluginType.SETTING, true));
        }

        getActivity();
    }

    /**
     * Builds a start Intent that simulates editing a previously saved plug-in
     * instance: the Intent carries an "old" bundle and an "old" blurb.
     *
     * @param type        Plug-in type.
     * @param isBadBundle If true, an unexpected extra key is added to the
     *                    bundle so that the Activity's bundle validation
     *                    rejects it.
     * @return Intent to start the plug-in Activity with a previous bundle and
     *         blurb attached.
     */
    @NonNull
    private Intent getOldStartIntent(@NonNull final PluginType type, final boolean isBadBundle) {
        final Context context = getInstrumentation().getContext();

        final Bundle bundle = PluginBundleValues
                .generateBundle(context, "some_old_test_value"); //$NON-NLS-1$
        if (isBadBundle) {
            bundle.putString("extra_key", "extra_value"); //$NON-NLS-1$ //$NON-NLS-2$
        }

        final Intent i = getDefaultStartIntent(type);
        i.putExtra(com.twofortyfouram.locale.api.Intent.EXTRA_BUNDLE, bundle);
        i.putExtra(com.twofortyfouram.locale.api.Intent.EXTRA_STRING_BLURB,
                "Some old blurb"); //$NON-NLS-1$
        return i;
    }

    @MediumTest
    @UiThreadTest
    public void testNewCondition_cancel_because_null_bundle() {
        final FragmentPluginActivityImpl activity = getActivity();
        assertIsBundleValidCount(0);
        assertOnPostCreateWithPreviousBundleCount(0);
        assertNull(activity.getPreviousBundle());
        activity.finish();
        assertGetResultBundleCount(1);
        assertGetBlurbCount(0);
        assertActivityResult(Activity.RESULT_CANCELED, null, null);
    }

    @MediumTest
    @UiThreadTest
    public void testNewSetting_cancel_because_null_bundle() {
        final FragmentPluginActivityImpl activity = getActivity();
        assertIsBundleValidCount(0);
        assertOnPostCreateWithPreviousBundleCount(0);
        assertNull(activity.getPreviousBundle());
        activity.finish();
        assertGetResultBundleCount(1);
        assertGetBlurbCount(0);
        assertActivityResult(Activity.RESULT_CANCELED, null, null);
    }

    @MediumTest
    @UiThreadTest
    public void testNewCondition_save() {
        final FragmentPluginActivityImpl activity = getActivity();
        assertIsBundleValidCount(0);
        assertOnPostCreateWithPreviousBundleCount(0);
        assertNull(activity.getPreviousBundle());
        final Bundle bundle = PluginBundleValues.generateBundle(getInstrumentation().getContext(),
                "some_new_test_value"); //$NON-NLS-1$
        final String blurb = "Some new blurb"; //$NON-NLS-1$
        setActivityBundleAndBlurb(bundle, blurb);
        activity.finish();
        assertGetResultBundleCount(1);
        assertGetBlurbCount(1);
        assertActivityResult(Activity.RESULT_OK, bundle, blurb);
    }

    @MediumTest
    @UiThreadTest
    public void testNewSetting_save() {
        final FragmentPluginActivityImpl activity = getActivity();
        assertIsBundleValidCount(0);
        assertOnPostCreateWithPreviousBundleCount(0);
        assertNull(activity.getPreviousBundle());
        final Bundle bundle = PluginBundleValues.generateBundle(getInstrumentation().getContext(),
                "some_new_test_value"); //$NON-NLS-1$
        final String blurb = "Some new blurb"; //$NON-NLS-1$
        setActivityBundleAndBlurb(bundle, blurb);
        activity.finish();
        assertGetResultBundleCount(1);
        assertGetBlurbCount(1);
        assertActivityResult(Activity.RESULT_OK, bundle, blurb);
    }

    @MediumTest
    @UiThreadTest
    public void testOldCondition_save_bundle_and_blurb_changed() {
        final FragmentPluginActivityImpl activity = getActivity();
        assertIsBundleValidCount(2);
        assertOnPostCreateWithPreviousBundleCount(1);
        final Bundle oldBundle = PluginBundleValues
                .generateBundle(getInstrumentation().getContext(),
                        "some_old_test_value"); //$NON-NLS-1$
        assertTrue(BundleComparer.areBundlesEqual(oldBundle, activity.getPreviousBundle()));
        assertEquals("Some old blurb", activity.getPreviousBlurb()); //$NON-NLS-1$
        // FIX: the new bundle was previously generated from the blurb string
        // "Some new blurb" (copy-paste error); use the new test value so this
        // mirrors testOldSetting_save_bundle_and_blurb_changed.
        final Bundle newBundle = PluginBundleValues
                .generateBundle(getInstrumentation().getContext(), "some_new_test_value"); //$NON-NLS-1$
        final String newBlurb = "Some new blurb"; //$NON-NLS-1$
        setActivityBundleAndBlurb(newBundle, newBlurb);
        activity.finish();
        assertGetResultBundleCount(1);
        assertGetBlurbCount(1);
        assertActivityResult(Activity.RESULT_OK, newBundle, newBlurb);
    }

    @MediumTest
    @UiThreadTest
    public void testOldCondition_save_bundle_changed() {
        final FragmentPluginActivityImpl activity = getActivity();
        assertIsBundleValidCount(2);
        assertOnPostCreateWithPreviousBundleCount(1);
        final Bundle oldBundle = PluginBundleValues
                .generateBundle(getInstrumentation().getContext(),
                        "some_old_test_value"); //$NON-NLS-1$
        assertTrue(BundleComparer.areBundlesEqual(oldBundle, activity.getPreviousBundle()));
        final String oldBlurb = "Some old blurb"; //$NON-NLS-1$
        assertEquals(oldBlurb, activity.getPreviousBlurb());
        final Bundle newBundle = PluginBundleValues
                .generateBundle(getInstrumentation().getContext(), "some_new_test_value"); //$NON-NLS-1$
        setActivityBundleAndBlurb(newBundle, oldBlurb);
        activity.finish();
        assertGetResultBundleCount(1);
        assertGetBlurbCount(1);
        assertActivityResult(Activity.RESULT_OK, newBundle, oldBlurb);
    }

    @MediumTest
    @UiThreadTest
    public void testOldCondition_save_blurb_changed() {
        final FragmentPluginActivityImpl activity = getActivity();
        assertIsBundleValidCount(2);
        assertOnPostCreateWithPreviousBundleCount(1);
        final Bundle oldBundle = PluginBundleValues
                .generateBundle(getInstrumentation().getContext(),
                        "some_old_test_value"); //$NON-NLS-1$
        assertTrue(BundleComparer.areBundlesEqual(oldBundle, activity.getPreviousBundle()));
        assertEquals("Some old blurb", activity.getPreviousBlurb()); //$NON-NLS-1$
        final String newBlurb = "Some new blurb"; //$NON-NLS-1$
        setActivityBundleAndBlurb(oldBundle, newBlurb);
        activity.finish();
        assertGetResultBundleCount(1);
        assertGetBlurbCount(1);
        assertActivityResult(Activity.RESULT_OK, oldBundle, newBlurb);
    }

    @MediumTest
    @UiThreadTest
    public void testOldSetting_save_bundle_and_blurb_changed() {
        final FragmentPluginActivityImpl activity = getActivity();
        assertIsBundleValidCount(2);
        assertOnPostCreateWithPreviousBundleCount(1);
        final Bundle oldBundle = PluginBundleValues
                .generateBundle(getInstrumentation().getContext(),
                        "some_old_test_value"); //$NON-NLS-1$
        assertTrue(BundleComparer.areBundlesEqual(oldBundle, activity.getPreviousBundle()));
        assertEquals("Some old blurb", activity.getPreviousBlurb()); //$NON-NLS-1$
        final Bundle newBundle = PluginBundleValues
                .generateBundle(getInstrumentation().getContext(), "some_new_test_value"); //$NON-NLS-1$
        final String newBlurb = "Some new blurb"; //$NON-NLS-1$
        setActivityBundleAndBlurb(newBundle, newBlurb);
        activity.finish();
        assertGetResultBundleCount(1);
        assertGetBlurbCount(1);
        assertActivityResult(Activity.RESULT_OK, newBundle, newBlurb);
    }

    @MediumTest
    @UiThreadTest
    public void testOldSetting_save_bundle_changed() {
        final FragmentPluginActivityImpl activity = getActivity();
        assertIsBundleValidCount(2);
        assertOnPostCreateWithPreviousBundleCount(1);
        final Bundle oldBundle = PluginBundleValues
                .generateBundle(getInstrumentation().getContext(),
                        "some_old_test_value"); //$NON-NLS-1$
        assertTrue(BundleComparer.areBundlesEqual(oldBundle, activity.getPreviousBundle()));
        final String oldBlurb = "Some old blurb"; //$NON-NLS-1$
        assertEquals(oldBlurb, activity.getPreviousBlurb());
        final Bundle newBundle = PluginBundleValues
                .generateBundle(getInstrumentation().getContext(), "some_new_test_value"); //$NON-NLS-1$
        setActivityBundleAndBlurb(newBundle, oldBlurb);
        activity.finish();
        assertGetResultBundleCount(1);
        assertGetBlurbCount(1);
        assertActivityResult(Activity.RESULT_OK, newBundle, oldBlurb);
    }

    @MediumTest
    @UiThreadTest
    public void testOldSetting_save_blurb_changed() {
        final FragmentPluginActivityImpl activity = getActivity();
        assertIsBundleValidCount(2);
        assertOnPostCreateWithPreviousBundleCount(1);
        final Bundle oldBundle = PluginBundleValues
                .generateBundle(getInstrumentation().getContext(),
                        "some_old_test_value"); //$NON-NLS-1$
        assertTrue(BundleComparer.areBundlesEqual(oldBundle, activity.getPreviousBundle()));
        assertEquals("Some old blurb", activity.getPreviousBlurb()); //$NON-NLS-1$
        final String newBlurb = "Some new blurb"; //$NON-NLS-1$
        setActivityBundleAndBlurb(oldBundle, newBlurb);
        activity.finish();
        assertGetResultBundleCount(1);
        assertGetBlurbCount(1);
        assertActivityResult(Activity.RESULT_OK, oldBundle, newBlurb);
    }

    @MediumTest
    @UiThreadTest
    public void testOldCondition_diffing_cancel() {
        final FragmentPluginActivityImpl activity = getActivity();
        assertIsBundleValidCount(2);
        assertOnPostCreateWithPreviousBundleCount(1);
        final Bundle oldBundle = PluginBundleValues
                .generateBundle(getInstrumentation().getContext(),
                        "some_old_test_value"); //$NON-NLS-1$
        assertTrue(BundleComparer.areBundlesEqual(oldBundle, activity.getPreviousBundle()));
        assertEquals("Some old blurb", activity.getPreviousBlurb()); //$NON-NLS-1$
        // Saving an unchanged bundle and blurb must result in a cancel.
        setActivityBundleAndBlurb(oldBundle, "Some old blurb"); //$NON-NLS-1$
        activity.finish();
        assertGetResultBundleCount(1);
        assertGetBlurbCount(1);
        assertActivityResult(Activity.RESULT_CANCELED, null, null);
    }

    @MediumTest
    @UiThreadTest
    public void testOldSetting_diffing_cancel() {
        final FragmentPluginActivityImpl activity = getActivity();
        assertIsBundleValidCount(2);
        assertOnPostCreateWithPreviousBundleCount(1);
        final Bundle oldBundle = PluginBundleValues
                .generateBundle(getInstrumentation().getContext(),
                        "some_old_test_value"); //$NON-NLS-1$
        assertTrue(BundleComparer.areBundlesEqual(oldBundle, activity.getPreviousBundle()));
        assertEquals("Some old blurb", activity.getPreviousBlurb()); //$NON-NLS-1$
        // Saving an unchanged bundle and blurb must result in a cancel.
        setActivityBundleAndBlurb(oldBundle, "Some old blurb"); //$NON-NLS-1$
        activity.finish();
        assertGetResultBundleCount(1);
        assertGetBlurbCount(1);
        assertActivityResult(Activity.RESULT_CANCELED, null, null);
    }

    @MediumTest
    @UiThreadTest
    public void testOldCondition_bad_bundle() {
        final FragmentPluginActivityImpl activity = getActivity();
        assertIsBundleValidCount(2);
        assertOnPostCreateWithPreviousBundleCount(0); // This is key for this test!
        activity.finish();
        assertGetResultBundleCount(1);
        assertGetBlurbCount(0);
        assertActivityResult(Activity.RESULT_CANCELED, null, null);
    }

    @MediumTest
    @UiThreadTest
    public void testOldSetting_bad_bundle() {
        final FragmentPluginActivityImpl activity = getActivity();
        assertIsBundleValidCount(2);
        assertOnPostCreateWithPreviousBundleCount(0); // This is key for this test!
        activity.finish();
        assertGetResultBundleCount(1);
        assertGetBlurbCount(0);
        assertActivityResult(Activity.RESULT_CANCELED, null, null);
    }

    /**
     * Executes a runnable on the main thread. This method works even if the
     * current thread is already the main thread.
     *
     * @param runnable to execute.
     */
    protected final void autoSyncRunnable(final Runnable runnable) {
        if (Looper.getMainLooper() == Looper.myLooper()) {
            runnable.run();
        } else {
            getInstrumentation().runOnMainSync(runnable);
            getInstrumentation().waitForIdleSync();
        }
    }

    /**
     * Simulates the Activity collecting a result bundle and blurb by assigning
     * the implementation's fields directly.
     */
    private void setActivityBundleAndBlurb(@Nullable final Bundle bundle,
            @Nullable final String blurb) {
        FragmentPluginActivityImpl activity = getActivity();
        activity.mBundle = bundle;
        activity.mBlurb = blurb;
    }

    /**
     * Asserts the Activity result is correct.
     * <p/>
     * {@link android.app.Activity#finish()} must be called prior to calling this method.
     *
     * @param resultCode Expected Activity result code, e.g.
     *                   {@link Activity#RESULT_OK} or {@link Activity#RESULT_CANCELED}.
     * @param bundle The bundle to verify exists. Null indicates that no bundle
     *               should be present (not that a null bundle should be present).
     * @param blurb The blurb to verify exists. Null indicates that no blurb
     *              should be present (not that a null blurb should be present).
     */
    private void assertActivityResult(final int resultCode, @Nullable final Bundle bundle,
            @Nullable final String blurb) {
        final Activity activity = getActivity();
        assertEquals(resultCode, ActivityTestUtil.getActivityResultCodeSync(getInstrumentation(), activity));

        if (Activity.RESULT_OK == resultCode) {
            final Intent result = ActivityTestUtil.getActivityResultDataSync(getInstrumentation(), activity);
            assertNotNull(result);
            final Bundle extras = result.getExtras();
            BundleAssertions.assertKeyCount(extras, 2);
            BundleAssertions.assertHasString(extras,
                    com.twofortyfouram.locale.api.Intent.EXTRA_STRING_BLURB, blurb);
            final Bundle pluginBundle = extras
                    .getBundle(com.twofortyfouram.locale.api.Intent.EXTRA_BUNDLE);
            assertTrue(BundleComparer.areBundlesEqual(bundle, pluginBundle));
        } else if (Activity.RESULT_CANCELED == resultCode) {
            assertNull(ActivityTestUtil.getActivityResultDataSync(getInstrumentation(), activity));
        }
    }

    /**
     * @param expectedCount Expected number of calls to
     *                      {@link com.twofortyfouram.locale.sdk.client.ui.activity.AbstractPluginActivity#getResultBlurb(android.os.Bundle)}}.
     */
    private void assertGetBlurbCount(final int expectedCount) {
        final FragmentPluginActivityImpl activity = getActivity();
        assertEquals(expectedCount, activity.mGetBlurbCount.get());
    }

    /**
     * @param expectedCount Expected number of calls to
     *                      {@link com.twofortyfouram.locale.sdk.client.ui.activity.AbstractPluginActivity#getResultBundle()}.
     */
    private void assertGetResultBundleCount(final int expectedCount) {
        final FragmentPluginActivityImpl activity = getActivity();
        assertEquals(expectedCount, activity.mGetResultBundleCount.get());
    }

    /**
     * @param expectedCount Expected number of calls to
     *                      {@link com.twofortyfouram.locale.sdk.client.ui.activity.AbstractPluginActivity#isBundleValid(android.os.Bundle)}}
     *                      .
     */
    private void assertIsBundleValidCount(final int expectedCount) {
        final FragmentPluginActivityImpl activity = getActivity();
        assertEquals(expectedCount, activity.mIsBundleValidCount.get());
    }

    /**
     * @param expectedCount Expected number of calls to
     *                      {@link com.twofortyfouram.locale.sdk.client.ui.activity.AbstractPluginActivity#onPostCreateWithPreviousBundle(android.os.Bundle)}
     *                      .
     */
    private void assertOnPostCreateWithPreviousBundleCount(final int expectedCount) {
        final FragmentPluginActivityImpl activity = getActivity();
        assertEquals(expectedCount, activity.mOnPostCreateWithPreviousBundleCount.get());
    }

    /**
     * @param type Plug-in type.
     * @return The default Intent to start the plug-in Activity. The Intent will
     * contain
     * {@link com.twofortyfouram.locale.api.Intent#EXTRA_STRING_BREADCRUMB}
     * .
     */
    @NonNull
    private static Intent getDefaultStartIntent(@NonNull final PluginType type) {
        Assertions.assertNotNull(type, "type"); //$NON-NLS-1$
        final Intent i = new Intent(type.getActivityIntentAction());
        i.putExtra(com.twofortyfouram.locale.api.Intent.EXTRA_STRING_BREADCRUMB,
                "Edit Situation"); //$NON-NLS-1$
        return i;
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.search;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsGroup;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.TestShardRouting;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.TermsQueryBuilder;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.internal.AliasFilter;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;
public class RemoteClusterServiceTests extends ESTestCase {

    private final ThreadPool threadPool = new TestThreadPool(getClass().getName());

    @Override
    public void tearDown() throws Exception {
        super.tearDown();
        ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS);
    }

    /** Starts a mock transport that advertises {@code knownNodes} at the given version. */
    private MockTransportService startTransport(String id, List<DiscoveryNode> knownNodes, Version version) {
        return RemoteClusterConnectionTests.startTransport(id, knownNodes, version, threadPool);
    }

    /** All remote-cluster settings must be registered as built-in cluster settings. */
    public void testSettingsAreRegistered() {
        assertTrue(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.contains(RemoteClusterService.REMOTE_CLUSTERS_SEEDS));
        assertTrue(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.contains(RemoteClusterService.REMOTE_CONNECTIONS_PER_CLUSTER));
        assertTrue(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.contains(RemoteClusterService.REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING));
        assertTrue(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.contains(RemoteClusterService.REMOTE_NODE_ATTRIBUTE));
    }

    public void testRemoteClusterSeedSetting() {
        // simple validation
        // NOTE(review): "search.remote.bar.seed" (no trailing "s") does not match the
        // seeds affix setting and is therefore ignored by getAllConcreteSettings —
        // presumably intentional to show unrelated keys are skipped; confirm.
        Settings settings = Settings.builder()
            .put("search.remote.foo.seeds", "192.168.0.1:8080")
            .put("search.remote.bar.seed", "[::1]:9090").build();
        RemoteClusterService.REMOTE_CLUSTERS_SEEDS.getAllConcreteSettings(settings).forEach(setting -> setting.get(settings));

        // A seed without a port must be rejected.
        Settings brokenSettings = Settings.builder()
            .put("search.remote.foo.seeds", "192.168.0.1").build();
        expectThrows(IllegalArgumentException.class, () ->
        RemoteClusterService.REMOTE_CLUSTERS_SEEDS.getAllConcreteSettings(brokenSettings).forEach(setting -> setting.get(brokenSettings)));
    }

    public void testBuiltRemoteClustersSeeds() throws Exception {
        Map<String, List<DiscoveryNode>> map = RemoteClusterService.buildRemoteClustersSeeds(
            Settings.builder().put("search.remote.foo.seeds", "192.168.0.1:8080").put("search.remote.bar.seeds", "[::1]:9090").build());
        assertEquals(2, map.size());
        assertTrue(map.containsKey("foo"));
        assertTrue(map.containsKey("bar"));
        assertEquals(1, map.get("foo").size());
        assertEquals(1, map.get("bar").size());

        DiscoveryNode foo = map.get("foo").get(0);
        assertEquals(foo.getAddress(), new TransportAddress(new InetSocketAddress(InetAddress.getByName("192.168.0.1"), 8080)));
        assertEquals(foo.getId(), "foo#192.168.0.1:8080");
        // Seed nodes are created at the minimum compatible version so that the
        // initial handshake works with older remote clusters.
        assertEquals(foo.getVersion(), Version.CURRENT.minimumCompatibilityVersion());

        DiscoveryNode bar = map.get("bar").get(0);
        assertEquals(bar.getAddress(), new TransportAddress(new InetSocketAddress(InetAddress.getByName("[::1]"), 9090)));
        assertEquals(bar.getId(), "bar#[::1]:9090");
        assertEquals(bar.getVersion(), Version.CURRENT.minimumCompatibilityVersion());
    }

    public void testGroupClusterIndices() throws IOException {
        List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
        try (MockTransportService seedTransport = startTransport("cluster_1_node", knownNodes, Version.CURRENT);
             MockTransportService otherSeedTransport = startTransport("cluster_2_node", knownNodes, Version.CURRENT)) {
            DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
            DiscoveryNode otherSeedNode = otherSeedTransport.getLocalDiscoNode();
            knownNodes.add(seedTransport.getLocalDiscoNode());
            knownNodes.add(otherSeedTransport.getLocalDiscoNode());
            Collections.shuffle(knownNodes, random());

            try (MockTransportService transportService = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool,
                null)) {
                transportService.start();
                transportService.acceptIncomingRequests();
                Settings.Builder builder = Settings.builder();
                builder.putArray("search.remote.cluster_1.seeds", seedNode.getAddress().toString());
                builder.putArray("search.remote.cluster_2.seeds", otherSeedNode.getAddress().toString());
                try (RemoteClusterService service = new RemoteClusterService(builder.build(), transportService)) {
                    assertFalse(service.isCrossClusterSearchEnabled());
                    service.initializeRemoteClusters();
                    assertTrue(service.isCrossClusterSearchEnabled());
                    assertTrue(service.isRemoteClusterRegistered("cluster_1"));
                    assertTrue(service.isRemoteClusterRegistered("cluster_2"));
                    assertFalse(service.isRemoteClusterRegistered("foo"));

                    // "foo:bar" and "foo" stay local ("foo" is not a registered
                    // cluster alias); prefixed expressions go to their cluster.
                    Map<String, List<String>> perClusterIndices = service.groupClusterIndices(new String[]{"foo:bar", "cluster_1:bar",
                        "cluster_2:foo:bar", "cluster_1:test", "cluster_2:foo*", "foo"}, i -> false);
                    String[] localIndices = perClusterIndices.computeIfAbsent(RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY,
                        k -> Collections.emptyList()).toArray(new String[0]);
                    assertNotNull(perClusterIndices.remove(RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY));
                    assertArrayEquals(new String[]{"foo:bar", "foo"}, localIndices);
                    assertEquals(2, perClusterIndices.size());
                    assertEquals(Arrays.asList("bar", "test"), perClusterIndices.get("cluster_1"));
                    assertEquals(Arrays.asList("foo:bar", "foo*"), perClusterIndices.get("cluster_2"));

                    // An index that exists locally but collides with a remote
                    // cluster alias must be rejected.
                    IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () ->
                        service.groupClusterIndices(new String[]{"foo:bar", "cluster_1:bar",
                            "cluster_2:foo:bar", "cluster_1:test", "cluster_2:foo*", "foo"}, i -> "cluster_1:bar".equals(i)));
                    assertEquals("Can not filter indices; index cluster_1:bar exists but there is also a remote cluster named:" +
                        " cluster_1", iae.getMessage());
                }
            }
        }
    }

    public void testIncrementallyAddClusters() throws IOException {
        List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
        try (MockTransportService seedTransport = startTransport("cluster_1_node", knownNodes, Version.CURRENT);
             MockTransportService otherSeedTransport = startTransport("cluster_2_node", knownNodes, Version.CURRENT)) {
            DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
            DiscoveryNode otherSeedNode = otherSeedTransport.getLocalDiscoNode();
            knownNodes.add(seedTransport.getLocalDiscoNode());
            knownNodes.add(otherSeedTransport.getLocalDiscoNode());
            Collections.shuffle(knownNodes, random());

            try (MockTransportService transportService = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool,
                null)) {
                transportService.start();
                transportService.acceptIncomingRequests();
                // The service is intentionally created with empty settings: this
                // test registers clusters one at a time via updateRemoteCluster.
                // (An unused Settings.Builder with seeds was removed here.)
                try (RemoteClusterService service = new RemoteClusterService(Settings.EMPTY, transportService)) {
                    assertFalse(service.isCrossClusterSearchEnabled());
                    service.initializeRemoteClusters();
                    assertFalse(service.isCrossClusterSearchEnabled());
                    service.updateRemoteCluster("cluster_1", Collections.singletonList(seedNode.getAddress().address()));
                    assertTrue(service.isCrossClusterSearchEnabled());
                    assertTrue(service.isRemoteClusterRegistered("cluster_1"));
                    service.updateRemoteCluster("cluster_2", Collections.singletonList(otherSeedNode.getAddress().address()));
                    assertTrue(service.isCrossClusterSearchEnabled());
                    assertTrue(service.isRemoteClusterRegistered("cluster_1"));
                    assertTrue(service.isRemoteClusterRegistered("cluster_2"));
                    // An empty seed list removes the cluster again.
                    service.updateRemoteCluster("cluster_2", Collections.emptyList());
                    assertFalse(service.isRemoteClusterRegistered("cluster_2"));
                    IllegalArgumentException iae = expectThrows(IllegalArgumentException.class,
                        () -> service.updateRemoteCluster(RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY, Collections.emptyList()));
                    assertEquals("remote clusters must not have the empty string as its key", iae.getMessage());
                }
            }
        }
    }

    public void testProcessRemoteShards() throws IOException {
        try (RemoteClusterService service = new RemoteClusterService(Settings.EMPTY, null)) {
            assertFalse(service.isCrossClusterSearchEnabled());
            List<ShardIterator> iteratorList = new ArrayList<>();
            Map<String, ClusterSearchShardsResponse> searchShardsResponseMap = new HashMap<>();
            DiscoveryNode[] nodes = new DiscoveryNode[] {
                new DiscoveryNode("node1", buildNewFakeTransportAddress(), Version.CURRENT),
                new DiscoveryNode("node2", buildNewFakeTransportAddress(), Version.CURRENT)
            };
            Map<String, AliasFilter> indicesAndAliases = new HashMap<>();
            indicesAndAliases.put("foo", new AliasFilter(new TermsQueryBuilder("foo", "bar"), Strings.EMPTY_ARRAY));
            indicesAndAliases.put("bar", new AliasFilter(new MatchAllQueryBuilder(), Strings.EMPTY_ARRAY));
            // FIX: the group for shard 1 of "foo" previously contained a routing
            // created for shard 0 (copy-paste); both routings now belong to shard 1,
            // matching the group's ShardId.
            ClusterSearchShardsGroup[] groups = new ClusterSearchShardsGroup[] {
                new ClusterSearchShardsGroup(new ShardId("foo", "foo_id", 0),
                    new ShardRouting[] {TestShardRouting.newShardRouting("foo", 0, "node1", true, ShardRoutingState.STARTED),
                        TestShardRouting.newShardRouting("foo", 0, "node2", false, ShardRoutingState.STARTED)}),
                new ClusterSearchShardsGroup(new ShardId("foo", "foo_id", 1),
                    new ShardRouting[] {TestShardRouting.newShardRouting("foo", 1, "node1", true, ShardRoutingState.STARTED),
                        TestShardRouting.newShardRouting("foo", 1, "node2", false, ShardRoutingState.STARTED)}),
                new ClusterSearchShardsGroup(new ShardId("bar", "bar_id", 0),
                    new ShardRouting[] {TestShardRouting.newShardRouting("bar", 0, "node2", true, ShardRoutingState.STARTED),
                        TestShardRouting.newShardRouting("bar", 0, "node1", false, ShardRoutingState.STARTED)})
            };
            searchShardsResponseMap.put("test_cluster_1", new ClusterSearchShardsResponse(groups, nodes, indicesAndAliases));
            Map<String, AliasFilter> remoteAliases = new HashMap<>();
            service.processRemoteShards(searchShardsResponseMap, iteratorList, remoteAliases);
            assertEquals(3, iteratorList.size());
            for (ShardIterator iterator : iteratorList) {
                if (iterator.shardId().getIndexName().endsWith("foo")) {
                    assertTrue(iterator.shardId().getId() == 0 || iterator.shardId().getId() == 1);
                    // Remote index names are prefixed with the cluster alias.
                    assertEquals("test_cluster_1:foo", iterator.shardId().getIndexName());
                    ShardRouting shardRouting = iterator.nextOrNull();
                    assertNotNull(shardRouting);
                    assertEquals(shardRouting.getIndexName(), "foo");
                    shardRouting = iterator.nextOrNull();
                    assertNotNull(shardRouting);
                    assertEquals(shardRouting.getIndexName(), "foo");
                    assertNull(iterator.nextOrNull());
                } else {
                    assertEquals(0, iterator.shardId().getId());
                    assertEquals("test_cluster_1:bar", iterator.shardId().getIndexName());
                    ShardRouting shardRouting = iterator.nextOrNull();
                    assertNotNull(shardRouting);
                    assertEquals(shardRouting.getIndexName(), "bar");
                    shardRouting = iterator.nextOrNull();
                    assertNotNull(shardRouting);
                    assertEquals(shardRouting.getIndexName(), "bar");
                    assertNull(iterator.nextOrNull());
                }
            }
            assertEquals(2, remoteAliases.size());
            assertTrue(remoteAliases.toString(), remoteAliases.containsKey("foo_id"));
            assertTrue(remoteAliases.toString(), remoteAliases.containsKey("bar_id"));
            assertEquals(new TermsQueryBuilder("foo", "bar"), remoteAliases.get("foo_id").getQueryBuilder());
            assertEquals(new MatchAllQueryBuilder(), remoteAliases.get("bar_id").getQueryBuilder());
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.parse;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hive.common.StatsSetupConst;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClientWithLocalCache;
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.NotificationEvent;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.messaging.EventMessage;
import org.apache.hadoop.hive.metastore.messaging.json.gzip.GzipJSONMessageEncoder;
import org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore;
import org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore.BehaviourInjection;
import org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore.CallerArguments;
import org.apache.hadoop.hive.shims.Utils;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.junit.Assert;
import java.io.IOException;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import static org.apache.hadoop.hive.metastore.ReplChangeManager.SOURCE_OF_REPLICATION;
/**
* Tests for statistics replication.
*/
public class TestStatsReplicationScenarios {
  @Rule
  public final TestName testName = new TestName();

  // FIX: the logger was created for TestReplicationScenarios.class (copy-paste slip),
  // which mis-attributed every log line from this suite. Use this class instead.
  protected static final Logger LOG = LoggerFactory.getLogger(TestStatsReplicationScenarios.class);

  static WarehouseInstance primary;
  private static WarehouseInstance replica;
  private String primaryDbName, replicatedDbName;
  private static HiveConf conf;
  // True when the primary gathers table/column statistics automatically
  // (HIVESTATSAUTOGATHER); when false, tests run explicit ANALYZE commands.
  private static boolean hasAutogather;

  /** Flavor of transactional table the tests create; null means plain (non-ACID) tables. */
  enum AcidTableKind {
    FULL_ACID,
    INSERT_ONLY
  }

  private static AcidTableKind acidTableKindToUse;

  @BeforeClass
  public static void classLevelSetup() throws Exception {
    Map<String, String> overrides = new HashMap<>();
    overrides.put(MetastoreConf.ConfVars.EVENT_MESSAGE_FACTORY.getHiveName(),
        GzipJSONMessageEncoder.class.getCanonicalName());
    // FIX: pass this class (was TestReplicationScenarios.class, a copy-paste slip) so the
    // HiveConf is attributed to the suite that actually owns it.
    internalBeforeClassSetup(overrides, overrides, TestStatsReplicationScenarios.class, true, null);
  }

  /**
   * Brings up a single-node mini DFS cluster and a primary and replica warehouse instance
   * backed by it. Intended to be called from subclasses' {@code @BeforeClass} as well.
   *
   * @param primaryOverrides extra configuration applied only to the primary warehouse
   * @param replicaOverrides extra configuration applied only to the replica warehouse
   * @param clazz            class used to label the {@link HiveConf} (logging/diagnostics)
   * @param autogather       whether statistics are auto-gathered on the primary
   * @param acidTableKind    kind of transactional tables to create, or null for plain tables
   */
  static void internalBeforeClassSetup(Map<String, String> primaryOverrides,
                                       Map<String, String> replicaOverrides, Class clazz,
                                       boolean autogather, AcidTableKind acidTableKind)
      throws Exception {
    conf = new HiveConf(clazz);
    conf.set("dfs.client.use.datanode.hostname", "true");
    conf.set("hadoop.proxyuser." + Utils.getUGI().getShortUserName() + ".hosts", "*");
    MiniDFSCluster miniDFSCluster =
        new MiniDFSCluster.Builder(conf).numDataNodes(1).format(true).build();
    Map<String, String> additionalOverrides = new HashMap<String, String>() {{
        put("fs.defaultFS", miniDFSCluster.getFileSystem().getUri().toString());
        put(HiveConf.ConfVars.HIVE_IN_TEST_REPL.varname, "true");
        put(HiveConf.ConfVars.REPL_DATA_COPY_LAZY.varname, "false");
    }};
    Map<String, String> replicatedOverrides = new HashMap<>();
    replicatedOverrides.putAll(additionalOverrides);
    replicatedOverrides.putAll(replicaOverrides);
    // Run with autogather false on primary if requested
    Map<String, String> sourceOverrides = new HashMap<>();
    hasAutogather = autogather;
    additionalOverrides.put(HiveConf.ConfVars.HIVESTATSAUTOGATHER.varname,
        autogather ? "true" : "false");
    sourceOverrides.putAll(additionalOverrides);
    sourceOverrides.putAll(primaryOverrides);
    primary = new WarehouseInstance(LOG, miniDFSCluster, sourceOverrides);
    // Replica must read dumps from the primary's repl directory.
    replicatedOverrides.put(MetastoreConf.ConfVars.REPLDIR.getHiveName(), primary.repldDir);
    replica = new WarehouseInstance(LOG, miniDFSCluster, replicatedOverrides);
    // Use transactional tables
    acidTableKindToUse = acidTableKind;
  }

  @AfterClass
  public static void classLevelTearDown() throws IOException {
    // FIX: guard against classLevelSetup having failed part-way, which would
    // otherwise turn the original failure into an NPE here.
    if (primary != null) {
      primary.close();
    }
    if (replica != null) {
      replica.close();
    }
  }

  @Before
  public void setup() throws Throwable {
    // set up metastore client cache
    if (conf.getBoolVar(HiveConf.ConfVars.MSC_CACHE_ENABLED)) {
      HiveMetaStoreClientWithLocalCache.init();
    }
    // FIX: removed a stray unary '+' ("_" + +System.currentTimeMillis()).
    primaryDbName = testName.getMethodName() + "_" + System.currentTimeMillis();
    replicatedDbName = "replicated_" + primaryDbName;
    primary.run("create database " + primaryDbName + " WITH DBPROPERTIES ( '" +
        SOURCE_OF_REPLICATION + "' = '1,2,3')");
  }

  @After
  public void tearDown() throws Throwable {
    primary.run("drop database if exists " + primaryDbName + " cascade");
    replica.run("drop database if exists " + replicatedDbName + " cascade");
  }

  /**
   * Extracts only the statistics-related parameters (basic stats plus
   * COLUMN_STATS_ACCURATE) from a table/partition parameter map, so that
   * comparisons ignore unrelated parameters.
   */
  private Map<String, String> collectStatsParams(Map<String, String> allParams) {
    Map<String, String> statsParams = new HashMap<>();
    List<String> params = new ArrayList<>(StatsSetupConst.SUPPORTED_STATS);
    params.add(StatsSetupConst.COLUMN_STATS_ACCURATE);
    for (String param : params) {
      String value = allParams.get(param);
      if (value != null) {
        statsParams.put(param, value);
      }
    }
    return statsParams;
  }

  /**
   * Asserts that column statistics and stats parameters of the given table (and its
   * partitions, if any) on the replica match those on the primary.
   */
  private void verifyReplicatedStatsForTable(String tableName) throws Throwable {
    // Test column stats
    Assert.assertEquals("Mismatching column statistics for table " + tableName,
        primary.getTableColumnStatistics(primaryDbName, tableName),
        replica.getTableColumnStatistics(replicatedDbName, tableName));
    // Test table level stats
    Map<String, String> rParams =
        collectStatsParams(replica.getTable(replicatedDbName, tableName).getParameters());
    Map<String, String> pParams =
        collectStatsParams(primary.getTable(primaryDbName, tableName).getParameters());
    Assert.assertEquals("Mismatch in stats parameters for table " + tableName, pParams, rParams);
    // FIX: dropped a no-op primary.getTable(...).getPartitionKeys() call whose
    // result was discarded — it only cost an extra metastore round trip.
    verifyReplicatedStatsForPartitionsOfTable(tableName);
  }

  /**
   * Asserts per-partition stats parameters and partition column statistics match between
   * primary and replica. No-op for unpartitioned tables.
   */
  private void verifyReplicatedStatsForPartitionsOfTable(String tableName)
      throws Throwable {
    // Test partition level stats
    List<Partition> pParts = primary.getAllPartitions(primaryDbName, tableName);
    if (pParts == null || pParts.isEmpty()) {
      // Not a partitioned table, nothing to verify.
      return;
    }
    List<FieldSchema> partKeys = primary.getTable(primaryDbName, tableName).getPartitionKeys();
    for (Partition pPart : pParts) {
      Partition rPart = replica.getPartition(replicatedDbName, tableName,
          pPart.getValues());
      Map<String, String> rParams = collectStatsParams(rPart.getParameters());
      Map<String, String> pParams = collectStatsParams(pPart.getParameters());
      String partName = Warehouse.makePartName(partKeys, pPart.getValues());
      Assert.assertEquals("Mismatch in stats parameters for partition " + partName + " of table " + tableName,
          pParams, rParams);
      // Test partition column stats for the partition
      // FIX: added the missing space before "of table" in the failure message.
      Assert.assertEquals("Mismatching column statistics for partition " + partName + " of table " + tableName,
          primary.getPartitionColumnStatistics(primaryDbName, tableName, partName,
              StatsSetupConst.getColumnsHavingStats(pParams)),
          replica.getPartitionColumnStatistics(replicatedDbName, tableName, partName,
              StatsSetupConst.getColumnsHavingStats(rParams)));
    }
  }

  /**
   * For a metadata-only replication, asserts that no statistics were carried over:
   * the replica table looks like a freshly created empty table.
   */
  private void verifyNoStatsReplicationForMetadataOnly(String tableName) throws Throwable {
    // Test column stats
    Assert.assertTrue(replica.getTableColumnStatistics(replicatedDbName, tableName).isEmpty());
    // When no data is replicated, the basic stats parameters for table should look as if it's a
    // new table created on replica i.e. zero or null.
    Map<String, String> rParams =
        collectStatsParams(replica.getTable(replicatedDbName, tableName).getParameters());
    for (String param : StatsSetupConst.SUPPORTED_STATS) {
      String val = rParams.get(param);
      Assert.assertTrue("parameter " + param + " of table " + tableName + " is expected to be " +
          "null or 0", val == null || val.trim().equals("0"));
    }
    // As long as the above conditions are met, it doesn't matter whether basic and column stats
    // state are set to true or false. If those are false, actual values are immaterial. If they
    // are true, the values assured above represent the correct state of no data.
    verifyNoPartitionStatsReplicationForMetadataOnly(tableName);
  }

  /**
   * For a metadata-only replication, asserts that no partitions and no partition column
   * statistics were replicated. No-op for unpartitioned tables.
   */
  private void verifyNoPartitionStatsReplicationForMetadataOnly(String tableName) throws Throwable {
    // Test partition level stats
    List<Partition> pParts = primary.getAllPartitions(primaryDbName, tableName);
    if (pParts == null || pParts.isEmpty()) {
      // Not a partitioned table, nothing to verify.
      return;
    }
    // Partitions are not replicated in metadata only replication.
    List<Partition> rParts = replica.getAllPartitions(replicatedDbName, tableName);
    Assert.assertTrue("Partitions replicated in a metadata only dump",
        rParts == null || rParts.isEmpty());
    // Test partition column stats for all partitions
    Map<String, List<ColumnStatisticsObj>> rPartColStats =
        replica.getAllPartitionColumnStatistics(replicatedDbName, tableName);
    for (Map.Entry<String, List<ColumnStatisticsObj>> entry: rPartColStats.entrySet()) {
      List<ColumnStatisticsObj> colStats = entry.getValue();
      Assert.assertTrue(colStats == null || colStats.isEmpty());
    }
  }

  /** Returns the CREATE TABLE suffix matching the configured {@link AcidTableKind}. */
  private String getCreateTableProperties() {
    if (acidTableKindToUse == AcidTableKind.FULL_ACID) {
      return " stored as orc TBLPROPERTIES('transactional'='true')";
    }
    if (acidTableKindToUse == AcidTableKind.INSERT_ONLY) {
      return " TBLPROPERTIES('transactional'='true', 'transactional_properties'='insert_only')";
    }
    return "";
  }

  /**
   * Creates the initial set of tables on the primary for bootstrap replication and, when
   * autogather is off, analyzes them explicitly.
   *
   * @return names of all tables created
   */
  private List<String> createBootStrapData() throws Throwable {
    // Unpartitioned table with data
    String simpleTableName = "sTable";
    // partitioned table with data
    String partTableName = "pTable";
    // Unpartitioned table without data during bootstrap and hence no stats
    String ndTableName = "ndTable";
    // Partitioned table without data during bootstrap and hence no stats.
    String ndPartTableName = "ndPTable";
    String tblCreateExtra = getCreateTableProperties();
    primary.run("use " + primaryDbName)
        .run("create table " + simpleTableName + " (id int)" + tblCreateExtra)
        .run("insert into " + simpleTableName + " values (1), (2)")
        .run("create table " + partTableName + " (place string) partitioned by (country string)"
            + tblCreateExtra)
        .run("insert into " + partTableName + " partition(country='india') values ('bangalore')")
        .run("insert into " + partTableName + " partition(country='us') values ('austin')")
        .run("insert into " + partTableName + " partition(country='france') values ('paris')")
        .run("create table " + ndTableName + " (str string)" + tblCreateExtra)
        .run("create table " + ndPartTableName + " (val string) partitioned by (pk int)" +
            tblCreateExtra);
    List<String> tableNames = new ArrayList<>(Arrays.asList(simpleTableName, partTableName,
        ndTableName, ndPartTableName));
    // Run analyze on each of the tables, if they are not being gathered automatically.
    if (!hasAutogather) {
      for (String name : tableNames) {
        Assert.assertTrue(primary.getTableColumnStatistics(primaryDbName, name).isEmpty());
        primary.run("use " + primaryDbName)
            .run("analyze table " + name + " compute statistics for columns");
      }
    }
    return tableNames;
  }

  /**
   * Dumps primarydb on primary, loads it on replica as replicadb, verifies that the statistics
   * loaded are same as the ones on primary.
   * @param tableNames, names of tables on primary expected to be loaded
   * @param parallelLoad, if true, parallel bootstrap load is used
   * @param metadataOnly, only metadata is dumped and loaded.
   * @param lastReplicationId of the last dump, for incremental dump/load
   * @param failRetry if true, the load is made to fail once and then retried
   * @return lastReplicationId of the dump performed.
   */
  private String dumpLoadVerify(List<String> tableNames, String lastReplicationId,
                                boolean parallelLoad, boolean metadataOnly, boolean failRetry)
      throws Throwable {
    List<String> withClauseList;
    // Parallel load works only for bootstrap.
    parallelLoad = parallelLoad && (lastReplicationId == null);
    // With clause construction for REPL DUMP command.
    if (metadataOnly) {
      withClauseList = Collections.singletonList("'hive.repl.dump.metadata.only'='true'");
    } else {
      withClauseList = Collections.emptyList();
    }
    // Take dump
    WarehouseInstance.Tuple dumpTuple = primary.run("use " + primaryDbName)
        .dump(primaryDbName, withClauseList);
    // Load, if necessary changing configuration.
    if (parallelLoad) {
      replica.hiveConf.setBoolVar(HiveConf.ConfVars.EXECPARALLEL, true);
    }
    // Fail load if for testing failure and retry scenario. Fail the load while setting
    // checkpoint for a table in the middle of list of tables.
    if (failRetry) {
      if (lastReplicationId == null) {
        failBootstrapLoad(tableNames.size()/2);
      } else {
        failIncrementalLoad();
      }
    }
    // Load, possibly a retry
    replica.load(replicatedDbName, primaryDbName);
    // Metadata load may not load all the events.
    if (!metadataOnly) {
      replica.run("repl status " + replicatedDbName)
          .verifyResult(dumpTuple.lastReplicationId);
    }
    if (parallelLoad) {
      replica.hiveConf.setBoolVar(HiveConf.ConfVars.EXECPARALLEL, false);
    }
    // Test statistics
    for (String name : tableNames) {
      if (metadataOnly) {
        verifyNoStatsReplicationForMetadataOnly(name);
      } else {
        verifyReplicatedStatsForTable(name);
      }
    }
    return dumpTuple.lastReplicationId;
  }

  /**
   * Run a bootstrap that will fail after the given number of tables have been loaded.
   */
  private void failBootstrapLoad(int failAfterNumTables) throws Throwable {
    // fail setting ckpt directory property for the second table so that we test the case when
    // bootstrap load fails after some but not all tables are loaded.
    BehaviourInjection<CallerArguments, Boolean> callerVerifier
        = new BehaviourInjection<CallerArguments, Boolean>() {
      int cntTables = 0;
      String prevTable = null;
      @Nullable
      @Override
      public Boolean apply(@Nullable CallerArguments args) {
        // Count each distinct table only once even though it may be altered repeatedly.
        if (prevTable == null ||
            !prevTable.equalsIgnoreCase(args.tblName)) {
          cntTables++;
        }
        prevTable = args.tblName;
        if (args.dbName.equalsIgnoreCase(replicatedDbName) && cntTables > failAfterNumTables) {
          injectionPathCalled = true;
          LOG.warn("Verifier - DB : " + args.dbName + " TABLE : " + args.tblName);
          return false;
        }
        return true;
      }
    };
    InjectableBehaviourObjectStore.setAlterTableModifier(callerVerifier);
    try {
      replica.loadFailure(replicatedDbName, primaryDbName);
      callerVerifier.assertInjectionsPerformed(true, false);
    } finally {
      InjectableBehaviourObjectStore.resetAlterTableModifier();
    }
  }

  /**
   * Runs two incremental loads that are made to fail: once on the second table-column-stats
   * update event and once on the second partition-column-stats update event.
   */
  private void failIncrementalLoad() throws Throwable {
    // fail add notification when second update table stats event is encountered. Thus we
    // test successful application as well as failed application of this event.
    BehaviourInjection<NotificationEvent, Boolean> callerVerifier
        = new BehaviourInjection<NotificationEvent, Boolean>() {
      int cntEvents = 0;
      @Override
      public Boolean apply(NotificationEvent entry) {
        cntEvents++;
        if (entry.getEventType().equalsIgnoreCase(EventMessage.EventType.UPDATE_TABLE_COLUMN_STAT.toString()) &&
            cntEvents > 1) {
          injectionPathCalled = true;
          LOG.warn("Verifier - DB: " + entry.getDbName()
              + " Table: " + entry.getTableName()
              + " Event: " + entry.getEventType());
          return false;
        }
        return true;
      }
    };
    InjectableBehaviourObjectStore.setAddNotificationModifier(callerVerifier);
    try {
      replica.loadFailure(replicatedDbName, primaryDbName);
    } finally {
      InjectableBehaviourObjectStore.resetAddNotificationModifier();
    }
    callerVerifier.assertInjectionsPerformed(true, false);
    // fail add notification when second update partition stats event is encountered. Thus we test
    // successful application as well as failed application of this event.
    callerVerifier = new BehaviourInjection<NotificationEvent, Boolean>() {
      int cntEvents = 0;
      @Override
      public Boolean apply(NotificationEvent entry) {
        cntEvents++;
        if (entry.getEventType().equalsIgnoreCase(EventMessage.EventType.UPDATE_PARTITION_COLUMN_STAT.toString()) &&
            cntEvents > 1) {
          injectionPathCalled = true;
          LOG.warn("Verifier - DB: " + entry.getDbName()
              + " Table: " + entry.getTableName()
              + " Event: " + entry.getEventType());
          return false;
        }
        return true;
      }
    };
    InjectableBehaviourObjectStore.setAddNotificationModifier(callerVerifier);
    try {
      replica.loadFailure(replicatedDbName, primaryDbName);
    } finally {
      InjectableBehaviourObjectStore.resetAddNotificationModifier();
    }
    callerVerifier.assertInjectionsPerformed(true, false);
  }

  /**
   * Applies inserts, new tables and new partitions for the incremental phase, and
   * appends the newly created table names to {@code tableNames}.
   */
  private void createIncrementalData(List<String> tableNames) throws Throwable {
    // Annotations for this table are same as createBootStrapData
    String simpleTableName = "sTable";
    String partTableName = "pTable";
    String ndTableName = "ndTable";
    String ndPartTableName = "ndPTable";
    String tblCreateExtra = getCreateTableProperties();
    Assert.assertTrue(tableNames.containsAll(Arrays.asList(simpleTableName, partTableName,
        ndTableName, ndPartTableName)));
    // New tables created during incremental phase and thus loaded with data and stats during
    // incremental phase.
    String incTableName = "iTable"; // New table
    String incPartTableName = "ipTable"; // New partitioned table
    primary.run("use " + primaryDbName)
        .run("insert into " + simpleTableName + " values (3), (4)")
        // new data inserted into table
        .run("insert into " + ndTableName + " values ('string1'), ('string2')")
        // two partitions changed and one unchanged
        .run("insert into " + partTableName + "(country, place) values ('india', 'pune')")
        .run("insert into " + partTableName + "(country, place) values ('us', 'chicago')")
        // new partition
        .run("insert into " + partTableName + "(country, place) values ('australia', 'perth')")
        .run("create table " + incTableName + " (config string, enabled boolean)" +
            tblCreateExtra)
        .run("insert into " + incTableName + " values ('conf1', true)")
        .run("insert into " + incTableName + " values ('conf2', false)")
        .run("insert into " + ndPartTableName + "(pk, val) values (1, 'one')")
        .run("insert into " + ndPartTableName + "(pk, val) values (1, 'another one')")
        .run("insert into " + ndPartTableName + "(pk, val) values (2, 'two')")
        .run("create table " + incPartTableName +
            "(val string) partitioned by (tvalue boolean)" + tblCreateExtra)
        .run("insert into " + incPartTableName + "(tvalue, val) values (true, 'true')")
        .run("insert into " + incPartTableName + "(tvalue, val) values (false, 'false')");
    tableNames.add(incTableName);
    tableNames.add(incPartTableName);
    // Run analyze on each of the tables, if they are not being gathered automatically.
    if (!hasAutogather) {
      for (String name : tableNames) {
        primary.run("use " + primaryDbName)
            .run("analyze table " + name + " compute statistics for columns");
      }
    }
  }

  /**
   * Applies insert-overwrite, truncate, CTAS, export/import and load operations, and
   * appends the newly created table names to {@code tableNames}.
   */
  private void applyDMLOperations(List<String> tableNames) throws Throwable {
    // Annotations for this table are same as createBootStrapData
    String simpleTableName = "sTable";
    String partTableName = "pTable";
    String ndTableName = "ndTable";
    String ndPartTableName = "ndPTable";
    String incTableName = "iTable"; // New table
    String tblCreateExtra = getCreateTableProperties();
    Assert.assertTrue(tableNames.containsAll(Arrays.asList(simpleTableName, partTableName,
        ndTableName, ndPartTableName, incTableName)));
    String ctasTableName = "ctasTable"; // Table created through CTAS
    String ctasPartTableName = "ctasPartTable"; // Table created through CTAS
    // Tables created through import
    String eximTableName = "eximTable";
    String eximPartTableName = "eximPartTable";
    // Tables created through load
    String loadTableName = "loadTable";
    String loadPartTableName = "loadPartTable";
    String exportPath = "'hdfs:///tmp/" + primaryDbName + "/" + incTableName + "/'";
    String exportPartPath = "'hdfs:///tmp/" + primaryDbName + "/" + partTableName + "/'";
    String localDir = "./test.dat";
    String inPath = localDir + "/000000_0";
    String tableStorage = "";
    if (acidTableKindToUse == AcidTableKind.FULL_ACID) {
      tableStorage = "stored as orc";
    }
    primary.run("use " + primaryDbName)
        // insert overwrite
        .run("insert overwrite table " + simpleTableName + " values (5), (6), (7)")
        .run("insert overwrite table " + partTableName + " partition (country='india') " +
            " values ('bombay')")
        // truncate
        .run("truncate table " + ndTableName)
        .run("truncate table " + ndPartTableName + " partition (pk=1)")
        // CTAS
        .run("create table " + ctasTableName + " as select * from " + incTableName)
        .run("create table " + ctasPartTableName + " as select * from " + partTableName)
        // Import
        .run("export table " + partTableName + " to " + exportPartPath)
        .run("import table " + eximPartTableName + " from " + exportPartPath)
        .run("export table " + incTableName + " to " + exportPath)
        .run("import table " + eximTableName + " from " + exportPath)
        // load
        // FIX: added the missing space before tableStorage — previously the statement
        // read "...'./test.dat'stored as orc select..." for full-ACID runs.
        .run("insert overwrite local directory '" + localDir + "' " + tableStorage + " select " +
            "* from " + simpleTableName)
        .run("create table " + loadTableName + " (id int)" + tblCreateExtra)
        .run("load data local inpath '" + inPath + "' overwrite into table " + loadTableName)
        .run("create table " + loadPartTableName + " (id int) partitioned by (key int) " + tblCreateExtra)
        .run("load data local inpath '" + inPath + "' overwrite into table "
            + loadPartTableName + " partition (key=1)");
    tableNames.add(ctasTableName);
    tableNames.add(ctasPartTableName);
    tableNames.add(eximTableName);
    tableNames.add(eximPartTableName);
    tableNames.add(loadTableName);
    tableNames.add(loadPartTableName);
    // Run analyze on each of the tables, if they are not being gathered automatically.
    if (!hasAutogather) {
      for (String name : tableNames) {
        primary.run("use " + primaryDbName)
            .run("analyze table " + name + " compute statistics for columns");
      }
    }
  }

  /**
   * Applies update/delete/merge operations; only meaningful for full-ACID tables.
   */
  private void applyTransactionalDMLOperations(List<String> tableNames) throws Throwable {
    // Annotations for this table are same as createBootStrapData
    String partTableName = "pTable";
    String ndTableName = "ndTable";
    String incTableName = "iTable";
    String eximTableName = "eximTable";
    String eximPartTableName = "eximPartTable";
    Assert.assertTrue(tableNames.containsAll(Arrays.asList(partTableName, ndTableName,
        eximPartTableName, eximTableName, incTableName)));
    primary.run("update " + partTableName + " set place = 'mumbai' where place = 'bombay'")
        .run("delete from " + partTableName + " where place = 'chicago'")
        .run("merge into " + eximPartTableName + " as T using " + partTableName + " as U "
            + " on T.country = U.country "
            + " when matched and T.place != U.place then update set place = U.place"
            + " when not matched then insert values (U.country, U.place)")
        .run("update " + incTableName + " set enabled = false where config = 'conf1'")
        .run("merge into " + eximTableName + " as T using " + incTableName + " as U "
            + " on T.config = U.config"
            + " when matched and T.enabled != U.enabled then update set enabled = U.enabled"
            + " when not matched then insert values (U.config, U.enabled)")
        .run("delete from " + ndTableName);
    // Run analyze on each of the tables, if they are not being gathered automatically.
    if (!hasAutogather) {
      for (String name : tableNames) {
        primary.run("use " + primaryDbName)
            .run("analyze table " + name + " compute statistics for columns");
      }
    }
  }

  /**
   * Applies alter-table DDL (add columns, change column, rename table, rename partition)
   * and updates {@code tableNames} for the renamed table.
   */
  private void applyDDLOperations(List<String> tableNames) throws Throwable {
    // Annotations for this table are same as createBootStrapData
    String simpleTableName = "sTable";
    String partTableName = "pTable";
    String incTableName = "iTable";
    String ctasTableName = "ctasTable"; // Table created through CTAS
    Assert.assertTrue(tableNames.containsAll(Arrays.asList(simpleTableName, partTableName,
        incTableName, ctasTableName)));
    String renamedTableName = "rnTable";
    primary.run("use " + primaryDbName)
        .run("alter table " + simpleTableName + " add columns (val int)")
        .run("alter table " + incTableName + " change config configuration string")
        .run("alter table " + ctasTableName + " rename to " + renamedTableName)
        .run("alter table " + partTableName +
            " partition(country='us') rename to partition (country='usa')");
    tableNames.remove(ctasTableName);
    tableNames.add(renamedTableName);
  }

  /**
   * Full bootstrap + incremental scenario: bootstrap, incremental with new data, DML
   * operations, transactional DML (full-ACID only) and DDL operations, verifying
   * statistics replication after each dump/load cycle.
   */
  private void testStatsReplicationCommon(boolean parallelBootstrap, boolean metadataOnly,
                                          boolean failRetry) throws Throwable {
    List<String> tableNames = createBootStrapData();
    String lastReplicationId = dumpLoadVerify(tableNames, null, parallelBootstrap,
        metadataOnly, failRetry);
    // Incremental dump
    createIncrementalData(tableNames);
    lastReplicationId = dumpLoadVerify(tableNames, lastReplicationId, parallelBootstrap,
        metadataOnly, failRetry);
    // Incremental dump with Insert overwrite operation
    applyDMLOperations(tableNames);
    lastReplicationId = dumpLoadVerify(tableNames, lastReplicationId, parallelBootstrap,
        metadataOnly, false);
    // Incremental dump with transactional DML operations
    if (acidTableKindToUse == AcidTableKind.FULL_ACID) {
      applyTransactionalDMLOperations(tableNames);
      lastReplicationId = dumpLoadVerify(tableNames, lastReplicationId, parallelBootstrap,
          metadataOnly, false);
    }
    // Incremental dump with DDL operations
    applyDDLOperations(tableNames);
    lastReplicationId = dumpLoadVerify(tableNames, lastReplicationId, parallelBootstrap,
        metadataOnly, false);
  }

  // FIX (all tests below): previously logged testName.getClass().getName(), which is
  // always "org.junit.rules.TestName"; use the test class's own name instead.
  @Test
  public void testNonParallelBootstrapLoad() throws Throwable {
    LOG.info("Testing " + getClass().getName() + "." + testName.getMethodName());
    testStatsReplicationCommon(false, false, false);
  }

  @Test
  public void testForParallelBootstrapLoad() throws Throwable {
    LOG.info("Testing " + getClass().getName() + "." + testName.getMethodName());
    testStatsReplicationCommon(true, false, false);
  }

  @Test
  public void testMetadataOnlyDump() throws Throwable {
    LOG.info("Testing " + getClass().getName() + "." + testName.getMethodName());
    testStatsReplicationCommon(false, true, false);
  }

  @Test
  public void testRetryFailure() throws Throwable {
    LOG.info("Testing " + getClass().getName() + "." + testName.getMethodName());
    testStatsReplicationCommon(false, false, true);
  }
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver13;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import com.google.common.collect.ImmutableList;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
// LoxiGen-generated, immutable value class for the OpenFlow 1.3
// table_feature_prop_apply_setfield property (wire type 0xe): the list of OXM
// ids a table can set-field on from the apply-actions instruction.
// NOTE(review): generated code ("Do not modify" above) — comments only; any
// behavioral change belongs in the LoxiGen templates.
class OFTableFeaturePropApplySetfieldVer13 implements OFTableFeaturePropApplySetfield {
    private static final Logger logger = LoggerFactory.getLogger(OFTableFeaturePropApplySetfieldVer13.class);
    // version: 1.3
    final static byte WIRE_VERSION = 4;
    // Minimum serialized size: 2-byte type + 2-byte length, with an empty oxmIds list.
    final static int MINIMUM_LENGTH = 4;

    private final static List<U32> DEFAULT_OXM_IDS = ImmutableList.<U32>of();

    // OF message fields
    private final List<U32> oxmIds;
//
    // Immutable default instance
    final static OFTableFeaturePropApplySetfieldVer13 DEFAULT = new OFTableFeaturePropApplySetfieldVer13(
        DEFAULT_OXM_IDS
    );

    // package private constructor - used by readers, builders, and factory
    OFTableFeaturePropApplySetfieldVer13(List<U32> oxmIds) {
        if(oxmIds == null) {
            throw new NullPointerException("OFTableFeaturePropApplySetfieldVer13: property oxmIds cannot be null");
        }
        this.oxmIds = oxmIds;
    }

    // Accessors for OF message fields
    @Override
    public int getType() {
        // Fixed wire-format property type for apply_setfield.
        return 0xe;
    }

    @Override
    public List<U32> getOxmIds() {
        return oxmIds;
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_13;
    }

    // Builder seeded with this instance's values; unset fields fall back to the parent.
    public OFTableFeaturePropApplySetfield.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    static class BuilderWithParent implements OFTableFeaturePropApplySetfield.Builder {
        final OFTableFeaturePropApplySetfieldVer13 parentMessage;

        // OF message fields
        private boolean oxmIdsSet;
        private List<U32> oxmIds;

        BuilderWithParent(OFTableFeaturePropApplySetfieldVer13 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public int getType() {
            return 0xe;
        }

        @Override
        public List<U32> getOxmIds() {
            return oxmIds;
        }

        @Override
        public OFTableFeaturePropApplySetfield.Builder setOxmIds(List<U32> oxmIds) {
            this.oxmIds = oxmIds;
            this.oxmIdsSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_13;
        }

        @Override
        public OFTableFeaturePropApplySetfield build() {
            // Use the explicitly-set value if any, else inherit from the parent message.
            List<U32> oxmIds = this.oxmIdsSet ? this.oxmIds : parentMessage.oxmIds;
            if(oxmIds == null)
                throw new NullPointerException("Property oxmIds must not be null");
//
            return new OFTableFeaturePropApplySetfieldVer13(
                    oxmIds
                );
        }
    }

    static class Builder implements OFTableFeaturePropApplySetfield.Builder {
        // OF message fields
        private boolean oxmIdsSet;
        private List<U32> oxmIds;

        @Override
        public int getType() {
            return 0xe;
        }

        @Override
        public List<U32> getOxmIds() {
            return oxmIds;
        }

        @Override
        public OFTableFeaturePropApplySetfield.Builder setOxmIds(List<U32> oxmIds) {
            this.oxmIds = oxmIds;
            this.oxmIdsSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_13;
        }

//
        @Override
        public OFTableFeaturePropApplySetfield build() {
            // Standalone builder: unset fields fall back to the generated defaults.
            List<U32> oxmIds = this.oxmIdsSet ? this.oxmIds : DEFAULT_OXM_IDS;
            if(oxmIds == null)
                throw new NullPointerException("Property oxmIds must not be null");
            return new OFTableFeaturePropApplySetfieldVer13(
                    oxmIds
                );
        }
    }

    final static Reader READER = new Reader();
    static class Reader implements OFMessageReader<OFTableFeaturePropApplySetfield> {
        @Override
        public OFTableFeaturePropApplySetfield readFrom(ChannelBuffer bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property type == 0xe
            short type = bb.readShort();
            if(type != (short) 0xe)
                throw new OFParseError("Wrong type: Expected=0xe(0xe), got="+type);
            // Length is an unsigned 16-bit value covering the whole property.
            int length = U16.f(bb.readShort());
            if(length < MINIMUM_LENGTH)
                throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                // Rewind so the caller can retry once more bytes arrive; null signals "incomplete".
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            // Remaining bytes of the property are the list of 4-byte OXM ids.
            List<U32> oxmIds = ChannelUtils.readList(bb, length - (bb.readerIndex() - start), U32.READER);
            OFTableFeaturePropApplySetfieldVer13 tableFeaturePropApplySetfieldVer13 = new OFTableFeaturePropApplySetfieldVer13(
                    oxmIds
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", tableFeaturePropApplySetfieldVer13);
            return tableFeaturePropApplySetfieldVer13;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFTableFeaturePropApplySetfieldVer13Funnel FUNNEL = new OFTableFeaturePropApplySetfieldVer13Funnel();
    static class OFTableFeaturePropApplySetfieldVer13Funnel implements Funnel<OFTableFeaturePropApplySetfieldVer13> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFTableFeaturePropApplySetfieldVer13 message, PrimitiveSink sink) {
            // fixed value property type = 0xe
            sink.putShort((short) 0xe);
            // FIXME: skip funnel of length
            FunnelUtils.putList(message.oxmIds, sink);
        }
    }

    public void writeTo(ChannelBuffer bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();
    static class Writer implements OFMessageWriter<OFTableFeaturePropApplySetfieldVer13> {
        @Override
        public void write(ChannelBuffer bb, OFTableFeaturePropApplySetfieldVer13 message) {
            int startIndex = bb.writerIndex();
            // fixed value property type = 0xe
            bb.writeShort((short) 0xe);
            // length is length of variable message, will be updated at the end
            int lengthIndex = bb.writerIndex();
            bb.writeShort(U16.t(0));
            ChannelUtils.writeList(bb, message.oxmIds);
            // update length field
            // Back-patch the 16-bit length now that the variable-size list is written.
            int length = bb.writerIndex() - startIndex;
            bb.setShort(lengthIndex, length);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFTableFeaturePropApplySetfieldVer13(");
        b.append("oxmIds=").append(oxmIds);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFTableFeaturePropApplySetfieldVer13 other = (OFTableFeaturePropApplySetfieldVer13) obj;
        // oxmIds is the only value-bearing field.
        if (oxmIds == null) {
            if (other.oxmIds != null)
                return false;
        } else if (!oxmIds.equals(other.oxmIds))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((oxmIds == null) ? 0 : oxmIds.hashCode());
        return result;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.service;
import java.io.DataOutputStream;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.net.InetAddress;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Multimap;
import org.apache.cassandra.config.Schema;
import org.apache.cassandra.net.*;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.concurrent.CreationTimeAwareFuture;
import org.apache.cassandra.concurrent.Stage;
import org.apache.cassandra.concurrent.StageManager;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.filter.QueryFilter;
import org.apache.cassandra.dht.AbstractBounds;
import org.apache.cassandra.dht.Bounds;
import org.apache.cassandra.dht.IPartitioner;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.gms.FailureDetector;
import org.apache.cassandra.gms.Gossiper;
import org.apache.cassandra.io.util.FastByteArrayOutputStream;
import org.apache.cassandra.locator.AbstractReplicationStrategy;
import org.apache.cassandra.locator.IEndpointSnitch;
import org.apache.cassandra.locator.TokenMetadata;
import org.apache.cassandra.net.*;
import org.apache.cassandra.thrift.*;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.LatencyTracker;
import org.apache.cassandra.utils.Pair;
public class StorageProxy implements StorageProxyMBean
{
    private static final Logger logger = LoggerFactory.getLogger(StorageProxy.class);
    private static final boolean OPTIMIZE_LOCAL_REQUESTS = true; // set to false to test messagingservice path on single node
    // mbean stuff: per-operation latency trackers exposed via StorageProxyMBean
    private static final LatencyTracker readStats = new LatencyTracker();
    private static final LatencyTracker rangeStats = new LatencyTracker();
    private static final LatencyTracker writeStats = new LatencyTracker();
    public static final String UNREACHABLE = "UNREACHABLE";
    // strategies for applying a mutation to its replica set; wired up in the static block below
    private static final WritePerformer standardWritePerformer;
    private static final WritePerformer counterWritePerformer;
    private static final WritePerformer counterWriteOnCoordinatorPerformer;
    public static final StorageProxy instance = new StorageProxy();
    // hinted-handoff knobs; volatile so JMX updates are visible to writer threads
    private static volatile boolean hintedHandoffEnabled = DatabaseDescriptor.hintedHandoffEnabled();
    private static volatile int maxHintWindow = DatabaseDescriptor.getMaxHintWindow();
    private static volatile int maxHintsInProgress = 1024 * Runtime.getRuntime().availableProcessors();
    // back-pressure counter for in-flight hints plus a lifetime total
    private static final AtomicInteger hintsInProgress = new AtomicInteger();
    private static final AtomicLong totalHints = new AtomicLong();
    // static-only class; the singleton above exists solely for MBean registration
    private StorageProxy() {}
    // Register the MBean and wire up the three write strategies.
    static
    {
        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        try
        {
            mbs.registerMBean(new StorageProxy(), new ObjectName("org.apache.cassandra.db:type=StorageProxy"));
        }
        catch (Exception e)
        {
            throw new RuntimeException(e);
        }
        // Standard (non-counter) writes: fan out to the replicas, hinting the dead ones.
        standardWritePerformer = new WritePerformer()
        {
            public void apply(IMutation mutation,
                              Collection<InetAddress> targets,
                              IWriteResponseHandler responseHandler,
                              String localDataCenter,
                              ConsistencyLevel consistency_level)
            throws IOException, TimeoutException
            {
                assert mutation instanceof RowMutation;
                sendToHintedEndpoints((RowMutation) mutation, targets, responseHandler, localDataCenter, consistency_level);
            }
        };
        /*
         * We execute counter writes in 2 places: either directly in the coordinator node if it is a replica, or
         * in CounterMutationVerbHandler on a replica otherwise. The write must be executed on the MUTATION stage
         * but in the latter case, the verb handler already runs on the MUTATION stage, so we must not execute the
         * underlying task on the stage again or we risk a deadlock. Hence two different performers.
         */
        // Counter write already on the MUTATION stage: run the task inline.
        counterWritePerformer = new WritePerformer()
        {
            public void apply(IMutation mutation,
                              Collection<InetAddress> targets,
                              IWriteResponseHandler responseHandler,
                              String localDataCenter,
                              ConsistencyLevel consistency_level)
            throws IOException
            {
                if (logger.isDebugEnabled())
                    logger.debug("insert writing local & replicate " + mutation.toString(true));
                Runnable runnable = counterWriteTask(mutation, targets, responseHandler, localDataCenter, consistency_level);
                runnable.run();
            }
        };
        // Counter write initiated on the coordinator: hand the task to the MUTATION stage.
        counterWriteOnCoordinatorPerformer = new WritePerformer()
        {
            public void apply(IMutation mutation,
                              Collection<InetAddress> targets,
                              IWriteResponseHandler responseHandler,
                              String localDataCenter,
                              ConsistencyLevel consistency_level)
            throws IOException
            {
                if (logger.isDebugEnabled())
                    logger.debug("insert writing local & replicate " + mutation.toString(true));
                Runnable runnable = counterWriteTask(mutation, targets, responseHandler, localDataCenter, consistency_level);
                StageManager.getStage(Stage.MUTATION).execute(runnable);
            }
        };
    }
/**
* Use this method to have these Mutations applied
* across all replicas. This method will take care
* of the possibility of a replica being down and hint
* the data across to some other replica.
*
* @param mutations the mutations to be applied across the replicas
* @param consistency_level the consistency level for the operation
*/
public static void mutate(List<? extends IMutation> mutations, ConsistencyLevel consistency_level) throws UnavailableException, TimeoutException
{
logger.debug("Mutations/ConsistencyLevel are {}/{}", mutations, consistency_level);
final String localDataCenter = DatabaseDescriptor.getEndpointSnitch().getDatacenter(FBUtilities.getBroadcastAddress());
long startTime = System.nanoTime();
List<IWriteResponseHandler> responseHandlers = new ArrayList<IWriteResponseHandler>();
IMutation mostRecentMutation = null;
try
{
for (IMutation mutation : mutations)
{
mostRecentMutation = mutation;
if (mutation instanceof CounterMutation)
{
responseHandlers.add(mutateCounter((CounterMutation)mutation, localDataCenter));
}
else
{
responseHandlers.add(performWrite(mutation, consistency_level, localDataCenter, standardWritePerformer));
}
}
// wait for writes. throws TimeoutException if necessary
for (IWriteResponseHandler responseHandler : responseHandlers)
{
responseHandler.get();
}
}
catch (TimeoutException ex)
{
if (logger.isDebugEnabled())
{
List<String> mstrings = new ArrayList<String>();
for (IMutation mutation : mutations)
mstrings.add(mutation.toString(true));
logger.debug("Write timeout {} for one (or more) of: ", ex.toString(), mstrings);
}
throw ex;
}
catch (IOException e)
{
assert mostRecentMutation != null;
throw new RuntimeException("error writing key " + ByteBufferUtil.bytesToHex(mostRecentMutation.key()), e);
}
finally
{
writeStats.addNano(System.nanoTime() - startTime);
}
}
    /**
     * Perform the write of a mutation given a WritePerformer.
     * Gather the list of write endpoints, apply locally and/or forward the mutation to
     * said write endpoints (delegated to the actual WritePerformer) and wait for the
     * responses based on consistency level.
     *
     * @param mutation the mutation to be applied
     * @param consistency_level the consistency level for the write operation
     * @param localDataCenter the datacenter of this coordinator node
     * @param performer the WritePerformer in charge of applying the mutation
     * given the list of write endpoints (either standardWritePerformer for
     * standard writes or counterWritePerformer for counter writes).
     * @return the handler the caller blocks on to await replica acknowledgments
     */
    public static IWriteResponseHandler performWrite(IMutation mutation,
                                                     ConsistencyLevel consistency_level,
                                                     String localDataCenter,
                                                     WritePerformer performer)
    throws UnavailableException, TimeoutException, IOException
    {
        String table = mutation.getTable();
        AbstractReplicationStrategy rs = Table.open(table).getReplicationStrategy();
        Collection<InetAddress> writeEndpoints = getWriteEndpoints(table, mutation.key());
        IWriteResponseHandler responseHandler = rs.getWriteResponseHandler(writeEndpoints, consistency_level);
        // exit early if we can't fulfill the CL at this time
        responseHandler.assureSufficientLiveNodes();
        performer.apply(mutation, writeEndpoints, responseHandler, localDataCenter, consistency_level);
        return responseHandler;
    }
private static Collection<InetAddress> getWriteEndpoints(String table, ByteBuffer key)
{
StorageService ss = StorageService.instance;
List<InetAddress> naturalEndpoints = ss.getNaturalEndpoints(table, key);
return ss.getTokenMetadata().getWriteEndpoints(StorageService.getPartitioner().getToken(key), table, naturalEndpoints);
}
    /**
     * Send the mutations to the right targets, write it locally if it corresponds or writes a hint when the node
     * is not available.
     *
     * Note about hints:
     *
     * | Hinted Handoff | Consist. Level |
     * | on             |       >=1      | --> wait for hints. We DO NOT notify the handler with handler.response() for hints;
     * | on             |       ANY      | --> wait for hints. Responses count towards consistency.
     * | off            |       >=1      | --> DO NOT fire hints. And DO NOT wait for them to complete.
     * | off            |       ANY      | --> DO NOT fire hints. And DO NOT wait for them to complete.
     *
     * @throws TimeoutException if the hints cannot be written/enqueued
     */
    private static void sendToHintedEndpoints(final RowMutation rm,
                                              Collection<InetAddress> targets,
                                              IWriteResponseHandler responseHandler,
                                              String localDataCenter,
                                              ConsistencyLevel consistency_level)
    throws IOException, TimeoutException
    {
        // Multimap that holds onto all the messages and addresses meant for a specific datacenter
        Map<String, Multimap<Message, InetAddress>> dcMessages = new HashMap<String, Multimap<Message, InetAddress>>(targets.size());
        // caches one serialized Message per messaging version across all targets
        MessageProducer producer = new CachingMessageProducer(rm);
        for (InetAddress destination : targets)
        {
            if (FailureDetector.instance.isAlive(destination))
            {
                String dc = DatabaseDescriptor.getEndpointSnitch().getDatacenter(destination);
                if (destination.equals(FBUtilities.getBroadcastAddress()) && OPTIMIZE_LOCAL_REQUESTS)
                {
                    // we are a replica ourselves: apply on the local MUTATION stage, no messaging
                    insertLocal(rm, responseHandler);
                }
                else
                {
                    // belongs on a different server
                    if (logger.isDebugEnabled())
                        logger.debug("insert writing key " + ByteBufferUtil.bytesToHex(rm.key()) + " to " + destination);
                    // group the remote target under its datacenter for batched sending below
                    Multimap<Message, InetAddress> messages = dcMessages.get(dc);
                    if (messages == null)
                    {
                        messages = HashMultimap.create();
                        dcMessages.put(dc, messages);
                    }
                    messages.put(producer.getMessage(Gossiper.instance.getVersion(destination)), destination);
                }
            }
            else
            {
                // dead target: hint it if hinted handoff allows, else skip entirely
                if (!shouldHint(destination))
                    continue;
                // Avoid OOMing from hints waiting to be written. (Unlike ordinary mutations, hints
                // are not eligible to drop if we fall behind.)
                if (hintsInProgress.get() > maxHintsInProgress)
                    throw new TimeoutException();
                // Schedule a local hint and let the handler know it needs to wait for the hint to complete too
                Future<Void> hintfuture = scheduleLocalHint(rm, destination, responseHandler, consistency_level);
                responseHandler.addFutureForHint(new CreationTimeAwareFuture<Void>(hintfuture));
            }
        }
        sendMessages(localDataCenter, dcMessages, responseHandler);
    }
    /**
     * Enqueue, on the local MUTATION stage, the writing of a hint for {@code target}.
     * The returned future completes when the hint row has been applied locally.
     * For CL.ANY the responseHandler is credited with a response once the hint lands.
     */
    public static Future<Void> scheduleLocalHint(final RowMutation mutation,
                                                 final InetAddress target,
                                                 final IWriteResponseHandler responseHandler,
                                                 final ConsistencyLevel consistencyLevel)
    throws IOException
    {
        // Hint of itself doesn't make sense.
        assert !target.equals(FBUtilities.getBroadcastAddress()) : target;
        // NOTE(review): incremented before submit(); if the stage ever rejected the
        // task this would never be decremented — confirm the executor cannot reject.
        hintsInProgress.incrementAndGet();
        Runnable runnable = new Runnable()
        {
            public void run()
            {
                if (logger.isDebugEnabled())
                    logger.debug("Adding hint for " + target);
                try
                {
                    // hints are keyed by the dead node's token so handoff can find them later
                    Token<?> token = StorageService.instance.getTokenMetadata().getToken(target);
                    ByteBuffer tokenbytes = StorageService.getPartitioner().getTokenFactory().toByteArray(token);
                    RowMutation hintedMutation = RowMutation.hintFor(mutation, tokenbytes);
                    hintedMutation.apply();
                    totalHints.incrementAndGet();
                    // Notify the handler only for CL == ANY
                    if (responseHandler != null && consistencyLevel == ConsistencyLevel.ANY)
                        responseHandler.response(null);
                }
                catch (IOException e)
                {
                    throw new RuntimeException(e);
                }
                finally
                {
                    // Decrement the current hint in the execution after the task is done.
                    hintsInProgress.decrementAndGet();
                }
            }
        };
        // unchecked cast: the stage's submit(Runnable) returns a raw Future
        return (Future<Void>) StageManager.getStage(Stage.MUTATION).submit(runnable);
    }
/**
* for each datacenter, send a message to one node to relay the write to other replicas
*/
private static void sendMessages(String localDataCenter, Map<String, Multimap<Message, InetAddress>> dcMessages, IWriteResponseHandler handler)
throws IOException
{
for (Map.Entry<String, Multimap<Message, InetAddress>> entry: dcMessages.entrySet())
{
String dataCenter = entry.getKey();
// send the messages corresponding to this datacenter
for (Map.Entry<Message, Collection<InetAddress>> messages: entry.getValue().asMap().entrySet())
{
Message message = messages.getKey();
// a single message object is used for unhinted writes, so clean out any forwards
// from previous loop iterations
message.removeHeader(RowMutation.FORWARD_HEADER);
if (dataCenter.equals(localDataCenter) || StorageService.instance.useEfficientCrossDCWrites())
{
// direct writes to local DC or old Cassadra versions
for (InetAddress destination : messages.getValue())
MessagingService.instance().sendRR(message, destination, handler);
}
else
{
// Non-local DC. First endpoint in list is the destination for this group
Iterator<InetAddress> iter = messages.getValue().iterator();
InetAddress target = iter.next();
// Add all the other destinations of the same message as a header in the primary message.
while (iter.hasNext())
{
InetAddress destination = iter.next();
// group all nodes in this DC as forward headers on the primary message
FastByteArrayOutputStream bos = new FastByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(bos);
// append to older addresses
byte[] previousHints = message.getHeader(RowMutation.FORWARD_HEADER);
if (previousHints != null)
dos.write(previousHints);
dos.write(destination.getAddress());
message.setHeader(RowMutation.FORWARD_HEADER, bos.toByteArray());
}
// send the combined message + forward headers
MessagingService.instance().sendRR(message, target, handler);
}
}
}
}
    /**
     * Apply the mutation on this node via the MUTATION stage and acknowledge
     * the handler when done. Used when the coordinator is itself a replica.
     */
    private static void insertLocal(final RowMutation rm, final IWriteResponseHandler responseHandler)
    {
        if (logger.isDebugEnabled())
            logger.debug("insert writing local " + rm.toString(true));
        // DroppableRunnable: the task may be shed if it sits in the queue past the rpc timeout
        Runnable runnable = new DroppableRunnable(StorageService.Verb.MUTATION)
        {
            public void runMayThrow() throws IOException
            {
                rm.apply();
                responseHandler.response(null);
            }
        };
        StageManager.getStage(Stage.MUTATION).execute(runnable);
    }
    /**
     * Handle counter mutation on the coordinator host.
     *
     * A counter mutation needs to first be applied to a replica (that we'll call the leader for the mutation) before being
     * replicated to the other endpoints. To achieve so, there are two cases:
     *   1) the coordinator host is a replica: we proceed to applying the update locally and replicate through
     *   applyCounterMutationOnCoordinator
     *   2) the coordinator is not a replica: we forward the (counter)mutation to a chosen replica (that will proceed through
     *   applyCounterMutationOnLeader upon receipt) and wait for its acknowledgment.
     *
     * Implementation note: We check if we can fulfill the CL on the coordinator host even if it is not a replica to allow
     * quicker response and because the WriteResponseHandlers don't make it easy to send back an error. We also always gather
     * the write latencies at the coordinator node to make the gathering point similar to the case of standard writes.
     */
    public static IWriteResponseHandler mutateCounter(CounterMutation cm, String localDataCenter) throws UnavailableException, TimeoutException, IOException
    {
        InetAddress endpoint = findSuitableEndpoint(cm.getTable(), cm.key(), localDataCenter);
        if (endpoint.equals(FBUtilities.getBroadcastAddress()))
        {
            // case 1: we are a replica, so we can act as the leader ourselves
            return applyCounterMutationOnCoordinator(cm, localDataCenter);
        }
        else
        {
            // Exit now if we can't fulfill the CL here instead of forwarding to the leader replica
            String table = cm.getTable();
            AbstractReplicationStrategy rs = Table.open(table).getReplicationStrategy();
            Collection<InetAddress> writeEndpoints = getWriteEndpoints(table, cm.key());
            rs.getWriteResponseHandler(writeEndpoints, cm.consistency()).assureSufficientLiveNodes();
            // Forward the actual update to the chosen leader replica
            IWriteResponseHandler responseHandler = WriteResponseHandler.create(endpoint);
            Message message = cm.makeMutationMessage(Gossiper.instance.getVersion(endpoint));
            if (logger.isDebugEnabled())
                logger.debug("forwarding counter update of key " + ByteBufferUtil.bytesToHex(cm.key()) + " to " + endpoint);
            MessagingService.instance().sendRR(message, endpoint, responseHandler);
            return responseHandler;
        }
    }
/**
* Find a suitable replica as leader for counter update.
* For now, we pick a random replica in the local DC (or ask the snitch if
* there is no replica alive in the local DC).
* TODO: if we track the latency of the counter writes (which makes sense
* contrarily to standard writes since there is a read involved), we could
* trust the dynamic snitch entirely, which may be a better solution. It
* is unclear we want to mix those latencies with read latencies, so this
* may be a bit involved.
*/
private static InetAddress findSuitableEndpoint(String table, ByteBuffer key, String localDataCenter) throws UnavailableException
{
IEndpointSnitch snitch = DatabaseDescriptor.getEndpointSnitch();
List<InetAddress> endpoints = StorageService.instance.getLiveNaturalEndpoints(table, key);
if (endpoints.isEmpty())
throw new UnavailableException();
List<InetAddress> localEndpoints = new ArrayList<InetAddress>();
for (InetAddress endpoint : endpoints)
{
if (snitch.getDatacenter(endpoint).equals(localDataCenter))
localEndpoints.add(endpoint);
}
if (localEndpoints.isEmpty())
{
// No endpoint in local DC, pick the closest endpoint according to the snitch
snitch.sortByProximity(FBUtilities.getBroadcastAddress(), endpoints);
return endpoints.get(0);
}
else
{
return localEndpoints.get(FBUtilities.threadLocalRandom().nextInt(localEndpoints.size()));
}
}
    // Must be called on a replica of the mutation. This replica becomes the
    // leader of this mutation. Runs the counter write task inline, so the
    // caller must already be on the MUTATION stage (see counterWritePerformer).
    public static IWriteResponseHandler applyCounterMutationOnLeader(CounterMutation cm, String localDataCenter) throws UnavailableException, TimeoutException, IOException
    {
        return performWrite(cm, cm.consistency(), localDataCenter, counterWritePerformer);
    }
    // Same as applyCounterMutationOnLeader with the difference that it uses the MUTATION stage to execute the write (while
    // applyCounterMutationOnLeader assumes it is on the MUTATION stage already)
    public static IWriteResponseHandler applyCounterMutationOnCoordinator(CounterMutation cm, String localDataCenter) throws UnavailableException, TimeoutException, IOException
    {
        return performWrite(cm, cm.consistency(), localDataCenter, counterWriteOnCoordinatorPerformer);
    }
    /**
     * Build the runnable that applies a counter mutation locally (this node is
     * the leader) and then, if replicate-on-write is enabled, forwards the
     * replication mutation to the remaining replicas from a separate stage.
     */
    private static Runnable counterWriteTask(final IMutation mutation,
                                             final Collection<InetAddress> targets,
                                             final IWriteResponseHandler responseHandler,
                                             final String localDataCenter,
                                             final ConsistencyLevel consistency_level)
    {
        return new DroppableRunnable(StorageService.Verb.MUTATION)
        {
            public void runMayThrow() throws IOException
            {
                assert mutation instanceof CounterMutation;
                final CounterMutation cm = (CounterMutation) mutation;
                // apply mutation
                cm.apply();
                // the leader's own write counts as one response
                responseHandler.response(null);
                // then send to replicas, if any
                targets.remove(FBUtilities.getBroadcastAddress());
                if (cm.shouldReplicateOnWrite() && !targets.isEmpty())
                {
                    // We do the replication on another stage because it involves a read (see CM.makeReplicationMutation)
                    // and we want to avoid blocking too much the MUTATION stage
                    StageManager.getStage(Stage.REPLICATE_ON_WRITE).execute(new DroppableRunnable(StorageService.Verb.READ)
                    {
                        public void runMayThrow() throws IOException, TimeoutException
                        {
                            // send mutation to other replica
                            sendToHintedEndpoints(cm.makeReplicationMutation(), targets, responseHandler, localDataCenter, consistency_level);
                        }
                    });
                }
            }
        };
    }
/**
* Performs the actual reading of a row out of the StorageService, fetching
* a specific set of column names from a given column family.
*/
public static List<Row> read(List<ReadCommand> commands, ConsistencyLevel consistency_level)
throws IOException, UnavailableException, TimeoutException, InvalidRequestException
{
if (StorageService.instance.isBootstrapMode())
throw new UnavailableException();
long startTime = System.nanoTime();
List<Row> rows;
try
{
rows = fetchRows(commands, consistency_level);
}
finally
{
readStats.addNano(System.nanoTime() - startTime);
}
return rows;
}
    /**
     * This function executes local and remote reads, and blocks for the results:
     *
     * 1. Get the replica locations, sorted by response time according to the snitch
     * 2. Send a data request to the closest replica, and digest requests to either
     *    a) all the replicas, if read repair is enabled
     *    b) the closest R-1 replicas, where R is the number required to satisfy the ConsistencyLevel
     * 3. Wait for a response from R replicas
     * 4. If the digests (if any) match the data return the data
     * 5. else carry out read repair by getting data from all the nodes.
     */
    private static List<Row> fetchRows(List<ReadCommand> initialCommands, ConsistencyLevel consistency_level) throws IOException, UnavailableException, TimeoutException
    {
        List<Row> rows = new ArrayList<Row>(initialCommands.size());
        // commands that produced a "short read" are retried with a larger count on the next pass
        List<ReadCommand> commandsToRetry = Collections.emptyList();
        do
        {
            List<ReadCommand> commands = commandsToRetry.isEmpty() ? initialCommands : commandsToRetry;
            ReadCallback<Row>[] readCallbacks = new ReadCallback[commands.size()];
            if (!commandsToRetry.isEmpty())
                logger.debug("Retrying {} commands", commandsToRetry.size());
            // send out read requests
            for (int i = 0; i < commands.size(); i++)
            {
                ReadCommand command = commands.get(i);
                assert !command.isDigestQuery();
                logger.debug("Command/ConsistencyLevel is {}/{}", command, consistency_level);
                List<InetAddress> endpoints = StorageService.instance.getLiveNaturalEndpoints(command.table,
                                                                                             command.key);
                // sort so the closest replica serves the full data request
                DatabaseDescriptor.getEndpointSnitch().sortByProximity(FBUtilities.getBroadcastAddress(), endpoints);
                RowDigestResolver resolver = new RowDigestResolver(command.table, command.key);
                ReadCallback<Row> handler = getReadCallback(resolver, command, consistency_level, endpoints);
                handler.assureSufficientLiveNodes();
                assert !handler.endpoints.isEmpty();
                readCallbacks[i] = handler;
                // The data-request message is sent to dataPoint, the node that will actually get the data for us
                InetAddress dataPoint = handler.endpoints.get(0);
                if (dataPoint.equals(FBUtilities.getBroadcastAddress()) && OPTIMIZE_LOCAL_REQUESTS)
                {
                    logger.debug("reading data locally");
                    StageManager.getStage(Stage.READ).execute(new LocalReadRunnable(command, handler));
                }
                else
                {
                    logger.debug("reading data from {}", dataPoint);
                    MessagingService.instance().sendRR(command, dataPoint, handler);
                }
                // single live replica: nothing to cross-check digests against
                if (handler.endpoints.size() == 1)
                    continue;
                // send the other endpoints a digest request
                ReadCommand digestCommand = command.copy();
                digestCommand.setDigestQuery(true);
                MessageProducer producer = null;
                for (InetAddress digestPoint : handler.endpoints.subList(1, handler.endpoints.size()))
                {
                    if (digestPoint.equals(FBUtilities.getBroadcastAddress()))
                    {
                        logger.debug("reading digest locally");
                        StageManager.getStage(Stage.READ).execute(new LocalReadRunnable(digestCommand, handler));
                    }
                    else
                    {
                        logger.debug("reading digest from {}", digestPoint);
                        // (We lazy-construct the digest Message object since it may not be necessary if we
                        // are doing a local digest read, or no digest reads at all.)
                        if (producer == null)
                            producer = new CachingMessageProducer(digestCommand);
                        MessagingService.instance().sendRR(producer, digestPoint, handler);
                    }
                }
            }
            // read results and make a second pass for any digest mismatches
            List<ReadCommand> repairCommands = null;
            List<RepairCallback> repairResponseHandlers = null;
            for (int i = 0; i < commands.size(); i++)
            {
                ReadCallback<Row> handler = readCallbacks[i];
                ReadCommand command = commands.get(i);
                try
                {
                    long startTime2 = System.currentTimeMillis();
                    // blocks until R responses arrive (or throws)
                    Row row = handler.get();
                    if (row != null)
                        rows.add(row);
                    if (logger.isDebugEnabled())
                        logger.debug("Read: " + (System.currentTimeMillis() - startTime2) + " ms.");
                }
                catch (TimeoutException ex)
                {
                    if (logger.isDebugEnabled())
                        logger.debug("Read timeout: {}", ex.toString());
                    throw ex;
                }
                catch (DigestMismatchException ex)
                {
                    // replicas disagree: re-read full data from every endpoint and repair
                    if (logger.isDebugEnabled())
                        logger.debug("Digest mismatch: {}", ex.toString());
                    RowRepairResolver resolver = new RowRepairResolver(command.table, command.key);
                    RepairCallback repairHandler = new RepairCallback(resolver, handler.endpoints);
                    if (repairCommands == null)
                    {
                        repairCommands = new ArrayList<ReadCommand>();
                        repairResponseHandlers = new ArrayList<RepairCallback>();
                    }
                    repairCommands.add(command);
                    repairResponseHandlers.add(repairHandler);
                    MessageProducer producer = new CachingMessageProducer(command);
                    for (InetAddress endpoint : handler.endpoints)
                        MessagingService.instance().sendRR(producer, endpoint, repairHandler);
                }
            }
            // reset the retry list (only clear when it's our own mutable list, not the shared empty one)
            if (commandsToRetry != Collections.EMPTY_LIST)
                commandsToRetry.clear();
            // read the results for the digest mismatch retries
            if (repairResponseHandlers != null)
            {
                for (int i = 0; i < repairCommands.size(); i++)
                {
                    ReadCommand command = repairCommands.get(i);
                    RepairCallback handler = repairResponseHandlers.get(i);
                    // wait for the repair writes triggered by the resolver to land
                    FBUtilities.waitOnFutures(handler.resolver.repairResults, DatabaseDescriptor.getRpcTimeout());
                    Row row;
                    try
                    {
                        row = handler.get();
                    }
                    catch (DigestMismatchException e)
                    {
                        throw new AssertionError(e); // full data requested from each node here, no digests should be sent
                    }
                    // retry short reads, otherwise add the row to our resultset
                    if (command instanceof SliceFromReadCommand)
                    {
                        // short reads are only possible on SliceFromReadCommand
                        SliceFromReadCommand sliceCommand = (SliceFromReadCommand) command;
                        int maxLiveColumns = handler.getMaxLiveColumns();
                        int liveColumnsInRow = row != null ? row.cf.getLiveColumnCount() : 0;
                        assert maxLiveColumns <= sliceCommand.count;
                        // a replica hit its count limit but resolution dropped columns:
                        // there may be more live columns we never saw, so read again deeper
                        if ((maxLiveColumns == sliceCommand.count) && (liveColumnsInRow < sliceCommand.count))
                        {
                            logger.debug("detected short read: expected {} columns, but only resolved {} columns",
                                         sliceCommand.count, liveColumnsInRow);
                            int retryCount = sliceCommand.count + sliceCommand.count - liveColumnsInRow;
                            SliceFromReadCommand retryCommand = new SliceFromReadCommand(command.table,
                                                                                        command.key,
                                                                                        command.queryPath,
                                                                                        sliceCommand.start,
                                                                                        sliceCommand.finish,
                                                                                        sliceCommand.reversed,
                                                                                        retryCount);
                            if (commandsToRetry == Collections.EMPTY_LIST)
                                commandsToRetry = new ArrayList<ReadCommand>();
                            commandsToRetry.add(retryCommand);
                            continue;
                        }
                    }
                    rows.add(row);
                }
            }
        } while (!commandsToRetry.isEmpty());
        return rows;
    }
    /**
     * Executes a read on this node (the coordinator is itself a replica) on the
     * READ stage and feeds the result to the handler, recording the local
     * latency with MessagingService just like a remote response would.
     */
    static class LocalReadRunnable extends DroppableRunnable
    {
        private final ReadCommand command;
        private final ReadCallback<Row> handler;
        // enqueue time, used to measure local read latency below
        private final long start = System.currentTimeMillis();
        LocalReadRunnable(ReadCommand command, ReadCallback<Row> handler)
        {
            super(StorageService.Verb.READ);
            this.command = command;
            this.handler = handler;
        }
        protected void runMayThrow() throws IOException
        {
            if (logger.isDebugEnabled())
                logger.debug("LocalReadRunnable reading " + command);
            Table table = Table.open(command.table);
            ReadResponse result = ReadVerbHandler.getResponse(command, command.getRow(table));
            // report our own latency so the dynamic snitch can rank us too
            MessagingService.instance().addLatency(FBUtilities.getBroadcastAddress(), System.currentTimeMillis() - start);
            handler.response(result);
        }
    }
static <T> ReadCallback<T> getReadCallback(IResponseResolver<T> resolver, IReadCommand command, ConsistencyLevel consistencyLevel, List<InetAddress> endpoints)
{
if (consistencyLevel == ConsistencyLevel.LOCAL_QUORUM || consistencyLevel == ConsistencyLevel.EACH_QUORUM)
{
return new DatacenterReadCallback(resolver, consistencyLevel, command, endpoints);
}
return new ReadCallback(resolver, consistencyLevel, command, endpoints);
}
    /*
     * This function executes the read protocol locally. Consistency checks are performed in the background.
     * Walks the restricted token ranges in order, reading each range either
     * locally (CL.ONE with ourselves as closest replica) or from the live
     * replicas, until command.max_keys rows have been collected.
     */
    public static List<Row> getRangeSlice(RangeSliceCommand command, ConsistencyLevel consistency_level)
    throws IOException, UnavailableException, TimeoutException
    {
        if (logger.isDebugEnabled())
            logger.debug(command.toString());
        long startTime = System.nanoTime();
        List<Row> rows;
        // now scan until we have enough results
        try
        {
            rows = new ArrayList<Row>(command.max_keys);
            // split the requested range so each piece is owned by a single replica set
            List<AbstractBounds> ranges = getRestrictedRanges(command.range);
            for (AbstractBounds range : ranges)
            {
                List<InetAddress> liveEndpoints = StorageService.instance.getLiveNaturalEndpoints(command.keyspace, range.right);
                DatabaseDescriptor.getEndpointSnitch().sortByProximity(FBUtilities.getBroadcastAddress(), liveEndpoints);
                // fast path: CL.ONE and we are the closest live replica -> read locally, no messaging
                if (consistency_level == ConsistencyLevel.ONE && !liveEndpoints.isEmpty() && liveEndpoints.get(0).equals(FBUtilities.getBroadcastAddress()))
                {
                    if (logger.isDebugEnabled())
                        logger.debug("local range slice");
                    ColumnFamilyStore cfs = Table.open(command.keyspace).getColumnFamilyStore(command.column_family);
                    try
                    {
                        rows.addAll(cfs.getRangeSlice(command.super_column,
                                                      range,
                                                      command.max_keys,
                                                      QueryFilter.getFilter(command.predicate, cfs.getComparator())));
                    }
                    catch (ExecutionException e)
                    {
                        throw new RuntimeException(e.getCause());
                    }
                    catch (InterruptedException e)
                    {
                        throw new AssertionError(e);
                    }
                }
                else
                {
                    // per-range sub-command sent to each live replica of this range
                    RangeSliceCommand c2 = new RangeSliceCommand(command.keyspace, command.column_family, command.super_column, command.predicate, range, command.max_keys);
                    // collect replies and resolve according to consistency level
                    RangeSliceResponseResolver resolver = new RangeSliceResponseResolver(command.keyspace, liveEndpoints);
                    ReadCallback<Iterable<Row>> handler = getReadCallback(resolver, command, consistency_level, liveEndpoints);
                    handler.assureSufficientLiveNodes();
                    for (InetAddress endpoint : liveEndpoints)
                    {
                        MessagingService.instance().sendRR(c2, endpoint, handler);
                        if (logger.isDebugEnabled())
                            logger.debug("reading " + c2 + " from " + endpoint);
                    }
                    try
                    {
                        for (Row row : handler.get())
                        {
                            rows.add(row);
                            logger.debug("range slices read {}", row.key);
                        }
                        // wait for any read-repair writes the resolver kicked off
                        FBUtilities.waitOnFutures(resolver.repairResults, DatabaseDescriptor.getRpcTimeout());
                    }
                    catch (TimeoutException ex)
                    {
                        if (logger.isDebugEnabled())
                            logger.debug("Range slice timeout: {}", ex.toString());
                        throw ex;
                    }
                    catch (DigestMismatchException e)
                    {
                        throw new AssertionError(e); // no digests in range slices yet
                    }
                }
                // if we're done, great, otherwise, move to the next range
                if (rows.size() >= command.max_keys)
                    break;
            }
        }
        finally
        {
            rangeStats.addNano(System.nanoTime() - startTime);
        }
        // the last range may have overshot max_keys; trim the excess
        return rows.size() > command.max_keys ? rows.subList(0, command.max_keys) : rows;
    }
/**
 * initiate a request/response session with each live node to check whether or not everybody is using the same
 * migration id. This is useful for determining if a schema change has propagated through the cluster. Disagreement
 * is assumed if any node fails to respond.
 *
 * @return map of schema-version string (or UNREACHABLE) to the list of host addresses reporting that version
 */
public static Map<String, List<String>> describeSchemaVersions()
{
    final String myVersion = Schema.instance.getVersion().toString();
    // responses arrive on messaging callback threads, hence the concurrent map
    final Map<InetAddress, UUID> versions = new ConcurrentHashMap<InetAddress, UUID>();
    final Set<InetAddress> liveHosts = Gossiper.instance.getLiveMembers();
    // one count per live host; decremented as each schema-check response arrives
    final CountDownLatch latch = new CountDownLatch(liveHosts.size());
    IAsyncCallback cb = new IAsyncCallback()
    {
        public void response(Message message)
        {
            // record the response from the remote node.
            logger.debug("Received schema check response from {}", message.getFrom().getHostAddress());
            UUID theirVersion = UUID.fromString(new String(message.getMessageBody()));
            versions.put(message.getFrom(), theirVersion);
            latch.countDown();
        }
        public boolean isLatencyForSnitch()
        {
            // schema checks should not skew snitch latency measurements
            return false;
        }
    };
    // an empty message acts as a request to the SchemaCheckVerbHandler.
    for (InetAddress endpoint : liveHosts)
    {
        Message message = new Message(FBUtilities.getBroadcastAddress(),
                                      StorageService.Verb.SCHEMA_CHECK,
                                      ArrayUtils.EMPTY_BYTE_ARRAY,
                                      Gossiper.instance.getVersion(endpoint));
        MessagingService.instance().sendRR(message, endpoint, cb);
    }
    try
    {
        // wait for as long as possible. timeout-1s if possible.
        latch.await(DatabaseDescriptor.getRpcTimeout(), TimeUnit.MILLISECONDS);
    }
    catch (InterruptedException ex)
    {
        throw new AssertionError("This latch shouldn't have been interrupted.");
    }
    logger.debug("My version is {}", myVersion);
    // maps versions to hosts that are on that version.
    Map<String, List<String>> results = new HashMap<String, List<String>>();
    // include unreachable members so callers can see exactly who did not answer
    Iterable<InetAddress> allHosts = Iterables.concat(Gossiper.instance.getLiveMembers(), Gossiper.instance.getUnreachableMembers());
    for (InetAddress host : allHosts)
    {
        UUID version = versions.get(host);
        // hosts that never responded (or are down) are grouped under the UNREACHABLE key
        String stringVersion = version == null ? UNREACHABLE : version.toString();
        List<String> hosts = results.get(stringVersion);
        if (hosts == null)
        {
            hosts = new ArrayList<String>();
            results.put(stringVersion, hosts);
        }
        hosts.add(host.getHostAddress());
    }
    // we're done: the results map is ready to return to the client. the rest is just debug logging:
    if (results.get(UNREACHABLE) != null)
        logger.debug("Hosts not in agreement. Didn't get a response from everybody: {}", StringUtils.join(results.get(UNREACHABLE), ","));
    for (Map.Entry<String, List<String>> entry : results.entrySet())
    {
        // check for version disagreement. log the hosts that don't agree.
        if (entry.getKey().equals(UNREACHABLE) || entry.getKey().equals(myVersion))
            continue;
        for (String host : entry.getValue())
            logger.debug("{} disagrees ({})", host, entry.getKey());
    }
    if (results.size() == 1)
        logger.debug("Schemas are in agreement.");
    return results;
}
/**
 * Compute all ranges we're going to query, in sorted order. Nodes can be replica destinations for many ranges,
 * so we need to restrict each scan to the specific range we want, or else we'd get duplicate results.
 *
 * @param queryRange the overall range the caller wants to query
 * @return the query range carved into pieces at each ring token it spans
 */
static List<AbstractBounds> getRestrictedRanges(final AbstractBounds queryRange)
{
    // a Bounds covering exactly one concrete (non-minimum) token needs no splitting at all
    boolean singleTokenBounds = queryRange instanceof Bounds
                             && queryRange.left.equals(queryRange.right)
                             && !queryRange.left.equals(StorageService.getPartitioner().getMinimumToken());
    if (singleTokenBounds)
    {
        if (logger.isDebugEnabled())
            logger.debug("restricted single token match for query {}", queryRange);
        return Collections.singletonList(queryRange);
    }
    TokenMetadata metadata = StorageService.instance.getTokenMetadata();
    List<AbstractBounds> restricted = new ArrayList<AbstractBounds>();
    // walk the ring starting from the query's left token, carving one piece off per ring token
    Iterator<Token> ring = TokenMetadata.ringIterator(metadata.sortedTokens(), queryRange.left, true);
    AbstractBounds remaining = queryRange;
    while (remaining != null && ring.hasNext())
    {
        Token upperBound = ring.next();
        // stop once the next ring token no longer falls within what is left of the query range
        if (!remaining.left.equals(upperBound) && !remaining.contains(upperBound))
            break;
        Pair<AbstractBounds,AbstractBounds> pieces = remaining.split(upperBound);
        if (pieces.left != null)
            restricted.add(pieces.left);
        remaining = pieces.right;
    }
    // whatever is left past the last ring token is the final piece
    if (remaining != null)
        restricted.add(remaining);
    if (logger.isDebugEnabled())
        logger.debug("restricted ranges for query {} are {}", queryRange, restricted);
    return restricted;
}
// ---- latency/count statistics accessors (delegate to the read/range/write stat trackers);
// ---- NOTE(review): presumably exposed through the StorageProxy MBean — confirm against the interface
public long getReadOperations()
{
    return readStats.getOpCount();
}
public long getTotalReadLatencyMicros()
{
    return readStats.getTotalLatencyMicros();
}
public double getRecentReadLatencyMicros()
{
    return readStats.getRecentLatencyMicros();
}
public long[] getTotalReadLatencyHistogramMicros()
{
    return readStats.getTotalLatencyHistogramMicros();
}
public long[] getRecentReadLatencyHistogramMicros()
{
    return readStats.getRecentLatencyHistogramMicros();
}
public long getRangeOperations()
{
    return rangeStats.getOpCount();
}
public long getTotalRangeLatencyMicros()
{
    return rangeStats.getTotalLatencyMicros();
}
public double getRecentRangeLatencyMicros()
{
    return rangeStats.getRecentLatencyMicros();
}
public long[] getTotalRangeLatencyHistogramMicros()
{
    return rangeStats.getTotalLatencyHistogramMicros();
}
public long[] getRecentRangeLatencyHistogramMicros()
{
    return rangeStats.getRecentLatencyHistogramMicros();
}
public long getWriteOperations()
{
    return writeStats.getOpCount();
}
public long getTotalWriteLatencyMicros()
{
    return writeStats.getTotalLatencyMicros();
}
public double getRecentWriteLatencyMicros()
{
    return writeStats.getRecentLatencyMicros();
}
public long[] getTotalWriteLatencyHistogramMicros()
{
    return writeStats.getTotalLatencyHistogramMicros();
}
public long[] getRecentWriteLatencyHistogramMicros()
{
    return writeStats.getRecentLatencyHistogramMicros();
}
/**
 * Performs an index scan: queries each restricted range in ring order, fanning the
 * IndexScanCommand out to the live replicas for that range and resolving the replies
 * at the requested consistency level, until index_clause.count rows have been collected.
 *
 * @throws UnavailableException if too few replicas are alive for the consistency level
 * @throws TimeoutException if replicas do not answer within the rpc timeout
 */
public static List<Row> scan(final String keyspace, String column_family, IndexClause index_clause, SlicePredicate column_predicate, ConsistencyLevel consistency_level)
throws IOException, TimeoutException, UnavailableException
{
    IPartitioner p = StorageService.getPartitioner();
    // start from the clause's start_key if given, otherwise from the minimum token (whole ring)
    Token leftToken = index_clause.start_key == null ? p.getMinimumToken() : p.getToken(index_clause.start_key);
    List<AbstractBounds> ranges = getRestrictedRanges(new Bounds(leftToken, p.getMinimumToken()));
    logger.debug("scan ranges are {}", StringUtils.join(ranges, ","));
    // now scan until we have enough results
    List<Row> rows = new ArrayList<Row>(index_clause.count);
    for (AbstractBounds range : ranges)
    {
        List<InetAddress> liveEndpoints = StorageService.instance.getLiveNaturalEndpoints(keyspace, range.right);
        // query the closest replicas first
        DatabaseDescriptor.getEndpointSnitch().sortByProximity(FBUtilities.getBroadcastAddress(), liveEndpoints);
        // collect replies and resolve according to consistency level
        RangeSliceResponseResolver resolver = new RangeSliceResponseResolver(keyspace, liveEndpoints);
        IReadCommand iCommand = new IReadCommand()
        {
            public String getKeyspace()
            {
                return keyspace;
            }
        };
        ReadCallback<Iterable<Row>> handler = getReadCallback(resolver, iCommand, consistency_level, liveEndpoints);
        // fail fast (UnavailableException) before sending anything if CL cannot be met
        handler.assureSufficientLiveNodes();
        IndexScanCommand command = new IndexScanCommand(keyspace, column_family, index_clause, column_predicate, range);
        MessageProducer producer = new CachingMessageProducer(command);
        for (InetAddress endpoint : liveEndpoints)
        {
            MessagingService.instance().sendRR(producer, endpoint, handler);
            if (logger.isDebugEnabled())
                logger.debug("reading {} from {}", command, endpoint);
        }
        try
        {
            for (Row row : handler.get())
            {
                rows.add(row);
                logger.debug("read {}", row);
            }
            // block until any read-repair writes triggered by the resolver have completed
            FBUtilities.waitOnFutures(resolver.repairResults, DatabaseDescriptor.getRpcTimeout());
        }
        catch (TimeoutException ex)
        {
            if (logger.isDebugEnabled())
                logger.debug("Index scan timeout: {}", ex.toString());
            throw ex;
        }
        catch (DigestMismatchException e)
        {
            // range/index reads never use digests, so a mismatch is a programming error
            throw new AssertionError(e);
        }
        // enough rows gathered: trim to the requested count and stop scanning further ranges
        if (rows.size() >= index_clause.count)
            return rows.subList(0, index_clause.count);
    }
    return rows;
}
/** @return whether hints are currently being recorded for down endpoints. */
public boolean getHintedHandoffEnabled()
{
    return hintedHandoffEnabled;
}
/** Enables or disables hinted handoff at runtime. */
public void setHintedHandoffEnabled(boolean b)
{
    hintedHandoffEnabled = b;
}
/** @return the maximum endpoint downtime (ms) for which hints are still written. */
public int getMaxHintWindow()
{
    return maxHintWindow;
}
/** Sets the maximum endpoint downtime (ms) for which hints are still written. */
public void setMaxHintWindow(int ms)
{
    maxHintWindow = ms;
}
/**
 * Decides whether a hint should be written for the given endpoint:
 * only when hinted handoff is enabled AND the endpoint has been down
 * no longer than the configured hint window.
 */
public static boolean shouldHint(InetAddress ep)
{
    if (!hintedHandoffEnabled)
        return false;
    // an endpoint down longer than the window is assumed gone for good; stop accumulating hints
    boolean hintWindowExpired = Gossiper.instance.getEndpointDowntime(ep) > maxHintWindow;
    if (hintWindowExpired)
        logger.debug("not hinting {} which has been down {}ms", ep, Gossiper.instance.getEndpointDowntime(ep));
    return !hintWindowExpired;
}
/**
 * Performs the truncate operation, which effectively deletes all data from
 * the column family cfname
 * @param keyspace the keyspace containing the column family
 * @param cfname the column family to truncate
 * @throws UnavailableException If some of the hosts in the ring are down.
 * @throws TimeoutException if not all replicas acknowledge the truncation in time
 * @throws IOException
 */
public static void truncateBlocking(String keyspace, String cfname) throws UnavailableException, TimeoutException, IOException
{
    // fix: the format string was missing the second {} placeholder, so cfname was never logged
    logger.debug("Starting a blocking truncate operation on keyspace {}, CF {}", keyspace, cfname);
    if (isAnyHostDown())
    {
        logger.info("Cannot perform truncate, some hosts are down");
        // Since the truncate operation is so aggressive and is typically only
        // invoked by an admin, for simplicity we require that all nodes are up
        // to perform the operation.
        throw new UnavailableException();
    }
    Set<InetAddress> allEndpoints = Gossiper.instance.getLiveMembers();
    // every live node must acknowledge before we consider the truncate done
    int blockFor = allEndpoints.size();
    final TruncateResponseHandler responseHandler = new TruncateResponseHandler(blockFor);
    // Send out the truncate calls and track the responses with the callbacks.
    logger.debug("Starting to send truncate messages to hosts {}", allEndpoints);
    final Truncation truncation = new Truncation(keyspace, cfname);
    MessageProducer producer = new CachingMessageProducer(truncation);
    for (InetAddress endpoint : allEndpoints)
        MessagingService.instance().sendRR(producer, endpoint, responseHandler);
    // Wait for all
    logger.debug("Sent all truncate messages, now waiting for {} responses", blockFor);
    responseHandler.get();
    logger.debug("truncate done");
}
/**
 * Asks the gossiper if there are any nodes that are currently down.
 * @return true if the gossiper reports at least one unreachable member.
 */
private static boolean isAnyHostDown()
{
    return !Gossiper.instance.getUnreachableMembers().isEmpty();
}
/**
 * Strategy interface for applying a mutation to a set of target replicas
 * with the given response handler and consistency level.
 */
private interface WritePerformer
{
    public void apply(IMutation mutation, Collection<InetAddress> targets, IWriteResponseHandler responseHandler, String localDataCenter, ConsistencyLevel consistency_level) throws IOException, TimeoutException;
}
/**
 * A Runnable that drops itself instead of executing when it is picked up
 * after the rpc timeout has already elapsed since it was constructed;
 * dropped executions are counted against the given verb's dropped-message metric.
 */
private static abstract class DroppableRunnable implements Runnable
{
    // timestamp at which this task became eligible to run
    private final long createdAt = System.currentTimeMillis();
    private final StorageService.Verb verb;
    public DroppableRunnable(StorageService.Verb verb)
    {
        this.verb = verb;
    }
    public final void run()
    {
        long expiresAt = createdAt + DatabaseDescriptor.getRpcTimeout();
        boolean stale = System.currentTimeMillis() > expiresAt;
        if (stale)
        {
            // too old to be useful to the coordinator; record the drop and skip the work
            MessagingService.instance().incrementDroppedMessages(verb);
            return;
        }
        try
        {
            runMayThrow();
        }
        catch (Exception e)
        {
            // surface checked exceptions to the executor as unchecked, preserving the cause
            throw new RuntimeException(e);
        }
    }
    /** The actual work; implemented by subclasses. */
    abstract protected void runMayThrow() throws Exception;
}
/** @return total number of hints written since startup. */
public long getTotalHints()
{
    return totalHints.get();
}
/** @return the configured cap on concurrently-writing hints. */
public int getMaxHintsInProgress()
{
    return maxHintsInProgress;
}
/** Sets the cap on concurrently-writing hints. */
public void setMaxHintsInProgress(int qs)
{
    maxHintsInProgress = qs;
}
/** @return number of hint writes currently in flight. */
public int getHintsInProgress()
{
    return hintsInProgress.get();
}
/** Logs a warning if any hint writes are still in flight (expected to be called at shutdown). */
public void verifyNoHintsInProgress()
{
    if (getHintsInProgress() > 0)
        logger.warn("Some hints were not written before shutdown. This is not supposed to happen. You should (a) run repair, and (b) file a bug report");
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.tools;
import com.intellij.CommonBundle;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.options.CompoundScheme;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Comparing;
import com.intellij.ui.*;
import com.intellij.util.ArrayUtil;
import com.intellij.util.PlatformIcons;
import com.intellij.util.ui.tree.TreeUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.*;
import java.awt.*;
import java.util.ArrayList;
import java.util.List;
/**
 * Settings panel showing external tools as a two-level checkbox tree
 * (groups containing tools) with toolbar actions to add, copy, edit,
 * remove and reorder entries. Edits are staged against copies (via
 * {@link CompoundScheme.MutatorHelper}) and written back on {@link #apply()}.
 */
public abstract class BaseToolsPanel<T extends Tool> extends JPanel {
  /** Direction of a move operation in the tree, with its bounds check and index math. */
  enum Direction {
    UP {
      @Override
      public boolean isAvailable(final int index, final int childCount) {
        return index != 0;
      }

      @Override
      public int newIndex(final int index) {
        return index - 1;
      }
    },
    DOWN {
      @Override
      public boolean isAvailable(final int index, final int childCount) {
        return index < childCount - 1;
      }

      @Override
      public int newIndex(final int index) {
        return index + 1;
      }
    };

    public abstract boolean isAvailable(final int index, final int childCount);

    public abstract int newIndex(final int index);
  }

  private final CheckboxTree myTree;
  private final AnActionButton myAddButton;
  private final AnActionButton myCopyButton;
  private final AnActionButton myEditButton;
  private final AnActionButton myMoveUpButton;
  private final AnActionButton myMoveDownButton;
  private final AnActionButton myRemoveButton;
  // set whenever the staged tree diverges from the persisted tool manager state
  private boolean myIsModified = false;
  private final CompoundScheme.MutatorHelper<ToolsGroup<T>, T> mutatorHelper = new CompoundScheme.MutatorHelper<ToolsGroup<T>, T>();

  protected BaseToolsPanel() {
    myTree = new CheckboxTree(
      new CheckboxTree.CheckboxTreeCellRenderer() {
        @Override
        public void customizeRenderer(final JTree tree,
                                      final Object value,
                                      final boolean selected,
                                      final boolean expanded,
                                      final boolean leaf,
                                      final int row,
                                      final boolean hasFocus) {
          if (!(value instanceof CheckedTreeNode)) return;
          Object object = ((CheckedTreeNode)value).getUserObject();
          if (object instanceof ToolsGroup) {
            final String groupName = ((ToolsGroup)object).getName();
            if (groupName != null) {
              getTextRenderer().append(groupName, SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES);
            }
            else {
              getTextRenderer().append("[unnamed group]", SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES);
            }
          }
          else if (object instanceof Tool) {
            getTextRenderer().append(((Tool)object).getName(), SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES);
          }
        }
      },
      new CheckedTreeNode(null)) {
      @Override
      protected void onDoubleClick(final CheckedTreeNode node) {
        editSelected();
      }

      @Override
      protected void onNodeStateChanged(final CheckedTreeNode node) {
        // toggling a checkbox enables/disables the tool, which must be applied
        myIsModified = true;
      }
    };

    myTree.setRootVisible(false);
    myTree.getEmptyText().setText(ToolsBundle.message("tools.not.configured"));
    myTree.setSelectionModel(new DefaultTreeSelectionModel());
    myTree.getSelectionModel().setSelectionMode(TreeSelectionModel.DISCONTIGUOUS_TREE_SELECTION);

    setLayout(new BorderLayout());
    add(ToolbarDecorator.createDecorator(myTree).setAddAction(new AnActionButtonRunnable() {
      @Override
      public void run(AnActionButton button) {
        ToolEditorDialog dlg = createToolEditorDialog(ToolsBundle.message("tools.add.title"));
        Tool tool = new Tool();
        tool.setUseConsole(true);
        tool.setFilesSynchronizedAfterRun(true);
        tool.setShownInMainMenu(true);
        tool.setShownInEditor(true);
        tool.setShownInProjectViews(true);
        tool.setShownInSearchResultsPopup(true);
        tool.setEnabled(true);
        dlg.setData(tool, getGroups());
        if (dlg.showAndGet()) {
          insertNewTool(dlg.getData(), true);
        }
        myTree.requestFocus();
      }
    }).setRemoveAction(new AnActionButtonRunnable() {
      @Override
      public void run(AnActionButton button) {
        removeSelected();
      }
    }).setEditAction(new AnActionButtonRunnable() {
      @Override
      public void run(AnActionButton button) {
        editSelected();
        myTree.requestFocus();
      }
    }).setMoveUpAction(new AnActionButtonRunnable() {
      @Override
      public void run(AnActionButton button) {
        moveNode(Direction.UP);
        myIsModified = true;
      }
    }).setMoveDownAction(new AnActionButtonRunnable() {
      @Override
      public void run(AnActionButton button) {
        moveNode(Direction.DOWN);
        myIsModified = true;
      }
    }).addExtraAction(myCopyButton = new AnActionButton(ToolsBundle.message("tools.copy.button"), PlatformIcons.COPY_ICON) {
      @Override
      public void actionPerformed(AnActionEvent e) {
        Tool originalTool = getSelectedTool();

        if (originalTool != null) {
          ToolEditorDialog dlg = createToolEditorDialog(ToolsBundle.message("tools.copy.title"));
          Tool toolCopy = new Tool();
          toolCopy.copyFrom(originalTool);
          dlg.setData(toolCopy, getGroups());
          if (dlg.showAndGet()) {
            insertNewTool(dlg.getData(), true);
          }
          myTree.requestFocus();
        }
      }
    }).createPanel(), BorderLayout.CENTER);

    myAddButton = ToolbarDecorator.findAddButton(this);
    myEditButton = ToolbarDecorator.findEditButton(this);
    myRemoveButton = ToolbarDecorator.findRemoveButton(this);
    myMoveUpButton = ToolbarDecorator.findUpButton(this);
    myMoveDownButton = ToolbarDecorator.findDownButton(this);

    //TODO check edit and delete
    myTree.getSelectionModel().addTreeSelectionListener(new TreeSelectionListener() {
      @Override
      public void valueChanged(TreeSelectionEvent e) {
        update();
      }
    });
    setMinimumSize(myTree.getEmptyText().getPreferredSize());
  }

  /** Rebuilds the tree from the tool manager's current groups and clears the modified flag. */
  public void reset() {
    mutatorHelper.clear();
    for (ToolsGroup<T> group : getToolManager().getGroups()) {
      // edit a copy so cancelled sessions leave the persisted groups untouched
      insertNewGroup(mutatorHelper.copy(group));
    }

    if ((getTreeRoot()).getChildCount() > 0) {
      myTree.setSelectionInterval(0, 0);
    }
    else {
      myTree.getSelectionModel().clearSelection();
    }
    (getModel()).nodeStructureChanged(null);

    TreeUtil.expand(myTree, 5);

    myIsModified = false;

    update();
  }

  protected abstract BaseToolManager<T> getToolManager();

  @NotNull
  private CheckedTreeNode insertNewGroup(@NotNull ToolsGroup<T> groupCopy) {
    CheckedTreeNode groupNode = new CheckedTreeNode(groupCopy);
    getTreeRoot().add(groupNode);
    for (T tool : groupCopy.getElements()) {
      insertNewTool(groupNode, tool);
    }
    return groupNode;
  }

  private CheckedTreeNode insertNewTool(@NotNull CheckedTreeNode groupNode, @NotNull Tool toolCopy) {
    CheckedTreeNode toolNode = new CheckedTreeNode(toolCopy);
    toolNode.setChecked(toolCopy.isEnabled());
    ((ToolsGroup)groupNode.getUserObject()).addElement(toolCopy);
    groupNode.add(toolNode);
    nodeWasInserted(toolNode);
    return toolNode;
  }

  private CheckedTreeNode getTreeRoot() {
    return (CheckedTreeNode)myTree.getModel().getRoot();
  }

  /** Writes the staged groups back to the tool manager and clears the modified flag. */
  public void apply() {
    getToolManager().setTools(mutatorHelper.apply(getGroupList()));
    myIsModified = false;
  }

  /** Collects the groups from the tree, syncing each tool's enabled flag from its checkbox. */
  @NotNull
  private List<ToolsGroup<T>> getGroupList() {
    MutableTreeNode root = (MutableTreeNode)myTree.getModel().getRoot();
    List<ToolsGroup<T>> result = new ArrayList<ToolsGroup<T>>(root.getChildCount());
    for (int i = 0; i < root.getChildCount(); i++) {
      final CheckedTreeNode node = (CheckedTreeNode)root.getChildAt(i);
      for (int j = 0; j < node.getChildCount(); j++) {
        final CheckedTreeNode toolNode = (CheckedTreeNode)node.getChildAt(j);
        ((Tool)toolNode.getUserObject()).setEnabled(toolNode.isChecked());
      }

      //noinspection unchecked
      result.add((ToolsGroup)node.getUserObject());
    }
    return result;
  }

  public boolean isModified() {
    return myIsModified;
  }

  private void moveNode(final Direction direction) {
    CheckedTreeNode node = getSelectedNode();
    if (node != null) {
      if (isMovingAvailable(node, direction)) {
        moveNode(node, direction);
        if (node.getUserObject() instanceof Tool) {
          // keep the underlying group model in the same order as the tree
          ToolsGroup group = (ToolsGroup)(((CheckedTreeNode)node.getParent()).getUserObject());
          Tool tool = (Tool)node.getUserObject();
          moveElementInsideGroup(tool, group, direction);
        }
        TreePath path = new TreePath(node.getPath());
        myTree.getSelectionModel().setSelectionPath(path);
        myTree.expandPath(path);
        myTree.requestFocus();
      }
    }
  }

  private static void moveElementInsideGroup(final Tool tool, final ToolsGroup group, Direction dir) {
    if (dir == Direction.UP) {
      group.moveElementUp(tool);
    }
    else {
      group.moveElementDown(tool);
    }
  }

  private void moveNode(final CheckedTreeNode toolNode, Direction dir) {
    CheckedTreeNode parentNode = (CheckedTreeNode)toolNode.getParent();
    int index = parentNode.getIndex(toolNode);
    removeNodeFromParent(toolNode);
    int newIndex = dir.newIndex(index);
    parentNode.insert(toolNode, newIndex);
    getModel().nodesWereInserted(parentNode, new int[]{newIndex});
  }

  private static boolean isMovingAvailable(final CheckedTreeNode toolNode, Direction dir) {
    TreeNode parent = toolNode.getParent();
    int index = parent.getIndex(toolNode);
    return dir.isAvailable(index, parent.getChildCount());
  }

  private void insertNewTool(@NotNull Tool newTool, boolean setSelection) {
    CheckedTreeNode groupNode = findGroupNode(newTool.getGroup());
    if (groupNode == null) {
      // first tool in this group: create the group node on demand
      groupNode = insertNewGroup(new ToolsGroup(newTool.getGroup()));
      nodeWasInserted(groupNode);
    }
    CheckedTreeNode tool = insertNewTool(groupNode, newTool);
    if (setSelection) {
      TreePath treePath = new TreePath(tool.getPath());
      myTree.expandPath(treePath);
      myTree.getSelectionModel().setSelectionPath(treePath);
    }
    myIsModified = true;
  }

  private void nodeWasInserted(final CheckedTreeNode groupNode) {
    (getModel()).nodesWereInserted(groupNode.getParent(), new int[]{groupNode.getParent().getChildCount() - 1});
  }

  private DefaultTreeModel getModel() {
    return (DefaultTreeModel)myTree.getModel();
  }

  private CheckedTreeNode findGroupNode(final String group) {
    for (int i = 0; i < getTreeRoot().getChildCount(); i++) {
      CheckedTreeNode node = (CheckedTreeNode)getTreeRoot().getChildAt(i);
      ToolsGroup g = (ToolsGroup)node.getUserObject();
      if (Comparing.equal(group, g.getName())) return node;
    }
    return null;
  }

  @Nullable
  private Tool getSelectedTool() {
    CheckedTreeNode node = getSelectedToolNode();
    if (node == null) return null;
    return node.getUserObject() instanceof Tool ? (Tool)node.getUserObject() : null;
  }

  @Nullable
  private ToolsGroup getSelectedToolGroup() {
    CheckedTreeNode node = getSelectedToolNode();
    if (node == null) return null;
    return node.getUserObject() instanceof ToolsGroup ? (ToolsGroup)node.getUserObject() : null;
  }

  /** Updates toolbar button enablement for the current selection (tool, group, or nothing). */
  private void update() {
    CheckedTreeNode node = getSelectedToolNode();
    Tool selectedTool = getSelectedTool();
    ToolsGroup selectedGroup = getSelectedToolGroup();

    if (selectedTool != null) {
      myAddButton.setEnabled(true);
      myCopyButton.setEnabled(true);
      myEditButton.setEnabled(true);
      myMoveDownButton.setEnabled(isMovingAvailable(node, Direction.DOWN));
      myMoveUpButton.setEnabled(isMovingAvailable(node, Direction.UP));
      myRemoveButton.setEnabled(true);
    }
    else if (selectedGroup != null) {
      myAddButton.setEnabled(true);
      myCopyButton.setEnabled(false);
      myEditButton.setEnabled(false);
      myMoveDownButton.setEnabled(isMovingAvailable(node, Direction.DOWN));
      myMoveUpButton.setEnabled(isMovingAvailable(node, Direction.UP));
      myRemoveButton.setEnabled(true);
    }
    else {
      myAddButton.setEnabled(true);
      myCopyButton.setEnabled(false);
      myEditButton.setEnabled(false);
      myMoveDownButton.setEnabled(false);
      myMoveUpButton.setEnabled(false);
      myRemoveButton.setEnabled(false);
    }

    (getModel()).nodeStructureChanged(null);

    myTree.repaint();
  }

  private void removeSelected() {
    CheckedTreeNode node = getSelectedToolNode();
    if (node != null) {
      int result = Messages.showYesNoDialog(
        this,
        ToolsBundle.message("tools.delete.confirmation"),
        CommonBundle.getWarningTitle(),
        Messages.getWarningIcon()
      );
      if (result != Messages.YES) {
        return;
      }
      myIsModified = true;

      if (node.getUserObject() instanceof Tool) {
        Tool tool = (Tool)node.getUserObject();
        CheckedTreeNode parentNode = (CheckedTreeNode)node.getParent();
        ((ToolsGroup)parentNode.getUserObject()).removeElement(tool);
        removeNodeFromParent(node);
        // drop the group node once its last tool is gone
        if (parentNode.getChildCount() == 0) {
          removeNodeFromParent(parentNode);
        }
      }
      else if (node.getUserObject() instanceof ToolsGroup) {
        removeNodeFromParent(node);
      }
      update();
      myTree.requestFocus();
    }
  }

  private void removeNodeFromParent(DefaultMutableTreeNode node) {
    TreeNode parent = node.getParent();
    int idx = parent.getIndex(node);
    node.removeFromParent();

    (getModel()).nodesWereRemoved(parent, new int[]{idx}, new TreeNode[]{node});
  }

  private void editSelected() {
    CheckedTreeNode node = getSelectedToolNode();
    // fix: the old nested `if (selected != null)` was dead code — the instanceof check
    // above already guarantees a non-null user object.
    if (node != null && node.getUserObject() instanceof Tool) {
      Tool selected = (Tool)node.getUserObject();
      String oldGroupName = selected.getGroup();
      ToolEditorDialog dlg = createToolEditorDialog(ToolsBundle.message("tools.edit.title"));
      dlg.setData(selected, getGroups());
      if (dlg.showAndGet()) {
        selected.copyFrom(dlg.getData());
        String newGroupName = selected.getGroup();
        if (!Comparing.equal(oldGroupName, newGroupName)) {
          // the tool changed groups: detach it from the old group node and re-insert
          CheckedTreeNode oldGroupNode = (CheckedTreeNode)node.getParent();
          removeNodeFromParent(node);
          ((ToolsGroup)oldGroupNode.getUserObject()).removeElement(selected);
          if (oldGroupNode.getChildCount() == 0) {
            removeNodeFromParent(oldGroupNode);
          }
          insertNewTool(selected, true);
        }
        else {
          (getModel()).nodeChanged(node);
        }
        myIsModified = true;
        update();
      }
    }
  }

  protected ToolEditorDialog createToolEditorDialog(String title) {
    return new ToolEditorDialog(this, title);
  }

  private CheckedTreeNode getSelectedToolNode() {
    TreePath selectionPath = myTree.getSelectionPath();
    if (selectionPath != null) {
      return (CheckedTreeNode)selectionPath.getLastPathComponent();
    }
    return null;
  }

  private CheckedTreeNode getSelectedNode() {
    // fix: this was a byte-for-byte duplicate of getSelectedToolNode(); delegate instead.
    return getSelectedToolNode();
  }

  private String[] getGroups() {
    List<String> result = new ArrayList<String>();
    for (ToolsGroup group : getGroupList()) {
      result.add(group.getName());
    }
    return ArrayUtil.toStringArray(result);
  }

  void addSelectionListener(TreeSelectionListener listener) {
    myTree.getSelectionModel().addTreeSelectionListener(listener);
  }

  /** @return the selected tool iff exactly one node is selected and it is a tool; otherwise null. */
  @Nullable
  Tool getSingleSelectedTool() {
    final TreePath[] selectionPaths = myTree.getSelectionPaths();
    if (selectionPaths == null || selectionPaths.length != 1) {
      return null;
    }
    Object toolOrToolGroup = ((CheckedTreeNode)selectionPaths[0].getLastPathComponent()).getUserObject();
    if (toolOrToolGroup instanceof Tool) {
      return (Tool)toolOrToolGroup;
    }
    return null;
  }

  /** Selects the first tool in the tree whose action id matches {@code actionId}, if any. */
  public void selectTool(final String actionId) {
    Object root = myTree.getModel().getRoot();
    // fix: `root == null ||` was redundant — instanceof is already false for null
    if (!(root instanceof CheckedTreeNode)) {
      return;
    }
    final List<CheckedTreeNode> nodes = new ArrayList<CheckedTreeNode>();
    new Object() {
      @SuppressWarnings("unchecked")
      public void collect(CheckedTreeNode node) {
        if (node.isLeaf()) {
          Object userObject = node.getUserObject();
          if (userObject instanceof Tool && actionId.equals(((Tool)userObject).getActionId())) {
            nodes.add(node);
          }
        }
        else {
          for (int i = 0; i < node.getChildCount(); i++) {
            final TreeNode child = node.getChildAt(i);
            if (child instanceof CheckedTreeNode) {
              collect((CheckedTreeNode)child);
            }
          }
        }
      }
    }.collect((CheckedTreeNode)root);
    if (nodes.isEmpty()) {
      return;
    }
    myTree.getSelectionModel().setSelectionPath(new TreePath(nodes.get(0).getPath()));
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.index.bitmap;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.StatsSetupConst;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Index;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.index.HiveIndexQueryContext;
import org.apache.hadoop.hive.ql.index.HiveIndexedInputFormat;
import org.apache.hadoop.hive.ql.index.IndexPredicateAnalyzer;
import org.apache.hadoop.hive.ql.index.IndexSearchCondition;
import org.apache.hadoop.hive.ql.index.TableBasedIndexHandler;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
import org.apache.hadoop.hive.ql.optimizer.IndexUtils;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan;
/**
* Index handler for the bitmap index. Bitmap index uses an EWAH-compressed
* bitmap to represent the values in a table.
*/
public class BitmapIndexHandler extends TableBasedIndexHandler {
private Configuration configuration;
private static final Logger LOG = LoggerFactory.getLogger(BitmapIndexHandler.class.getName());
@Override
public void generateIndexQuery(List<Index> indexes, ExprNodeDesc predicate,
    ParseContext pctx, HiveIndexQueryContext queryContext) {

  // split the predicate per index; null means nothing at all was pushable
  Map<Index, ExprNodeDesc> indexPredicates = decomposePredicate(
                                                            predicate,
                                                            indexes,
                                                            queryContext);

  if (indexPredicates == null) {
    LOG.info("No decomposed predicate found");
    queryContext.setQueryTasks(null);
    return; // abort if we couldn't pull out anything from the predicate
  }

  // one inner bitmap query per index that contributed a pushed predicate
  List<BitmapInnerQuery> iqs = new ArrayList<BitmapInnerQuery>(indexes.size());
  int i = 0;
  for (Index index : indexes) {
    ExprNodeDesc indexPredicate = indexPredicates.get(index);
    if (indexPredicate != null) {
      iqs.add(new BitmapInnerQuery(
            index.getIndexTableName(),
            indexPredicate,
            "ind" + i++));
    }
  }
  // setup TableScanOperator to change input format for original query
  queryContext.setIndexInputFormat(HiveIndexedInputFormat.class.getName());

  // Build reentrant QL for index query
  StringBuilder qlCommand = new StringBuilder("INSERT OVERWRITE DIRECTORY ");

  String tmpFile = pctx.getContext().getMRTmpPath().toUri().toString();
  qlCommand.append( "\"" + tmpFile + "\" ");            // QL includes " around file name
  qlCommand.append("SELECT bucketname AS `_bucketname` , COLLECT_SET(offset) AS `_offsets` FROM ");
  qlCommand.append("(SELECT `_bucketname` AS bucketname , `_offset` AS offset FROM ");

  // NOTE(review): assumes iqs is non-empty here (iqs.get(0) would otherwise throw);
  // decomposePredicate returning non-null appears to guarantee at least one pushed
  // predicate — confirm that per-index analysis can never reject them all.
  // left-fold the inner queries into nested bitmap-AND outer queries
  BitmapQuery head = iqs.get(0);
  for ( i = 1; i < iqs.size(); i++) {
    head = new BitmapOuterQuery("oind"+i, head, iqs.get(i));
  }
  qlCommand.append(head.toString());
  qlCommand.append(" WHERE NOT EWAH_BITMAP_EMPTY(" + head.getAlias() + ".`_bitmaps`) ) tmp_index GROUP BY bucketname");

  // generate tasks from index query string
  LOG.info("Generating tasks for re-entrant QL query: " + qlCommand.toString());
  HiveConf queryConf = new HiveConf(pctx.getConf(), BitmapIndexHandler.class);
  HiveConf.setBoolVar(queryConf, HiveConf.ConfVars.COMPRESSRESULT, false);
  Driver driver = new Driver(queryConf);
  // NOTE(review): the int return code of compile() is ignored — verify failures
  // surface through getPlan() downstream rather than being silently dropped.
  driver.compile(qlCommand.toString(), false);

  queryContext.setIndexIntermediateFile(tmpFile);
  queryContext.addAdditionalSemanticInputs(driver.getPlan().getInputs());
  queryContext.setQueryTasks(driver.getPlan().getRootTasks());
}
/**
* Split the predicate into the piece we can deal with (pushed), and the one we can't (residual)
* @param predicate
* @param index
* @return
*/
private Map<Index, ExprNodeDesc> decomposePredicate(ExprNodeDesc predicate, List<Index> indexes,
HiveIndexQueryContext queryContext) {
Map<Index, ExprNodeDesc> indexPredicates = new HashMap<Index, ExprNodeDesc>();
// compute overall residual
IndexPredicateAnalyzer analyzer = getIndexPredicateAnalyzer(indexes, queryContext.getQueryPartitions());
List<IndexSearchCondition> searchConditions = new ArrayList<IndexSearchCondition>();
ExprNodeDesc residualPredicate = analyzer.analyzePredicate(predicate, searchConditions);
// pass residual predicate back out for further processing
queryContext.setResidualPredicate(residualPredicate);
if (searchConditions.size() == 0) {
return null;
}
for (Index index : indexes) {
ArrayList<Index> in = new ArrayList<Index>(1);
in.add(index);
analyzer = getIndexPredicateAnalyzer(in, queryContext.getQueryPartitions());
searchConditions = new ArrayList<IndexSearchCondition>();
// split predicate into pushed (what we can handle), and residual (what we can't handle)
// pushed predicate from translateSearchConditions is stored for the current index
// This ensures that we apply all possible predicates to each index
analyzer.analyzePredicate(predicate, searchConditions);
if (searchConditions.size() == 0) {
indexPredicates.put(index, null);
} else {
indexPredicates.put(index, analyzer.translateSearchConditions(searchConditions));
}
}
return indexPredicates;
}
/**
* Instantiate a new predicate analyzer suitable for determining
* whether we can use an index, based on rules for indexes in
* WHERE clauses that we support
*
* @return preconfigured predicate analyzer for WHERE queries
*/
private IndexPredicateAnalyzer getIndexPredicateAnalyzer(List<Index> indexes, Set<Partition> queryPartitions) {
IndexPredicateAnalyzer analyzer = new IndexPredicateAnalyzer();
analyzer.addComparisonOp(GenericUDFOPEqual.class.getName());
analyzer.addComparisonOp(GenericUDFOPLessThan.class.getName());
analyzer.addComparisonOp(GenericUDFOPEqualOrLessThan.class.getName());
analyzer.addComparisonOp(GenericUDFOPGreaterThan.class.getName());
analyzer.addComparisonOp(GenericUDFOPEqualOrGreaterThan.class.getName());
// only return results for columns in the list of indexes
for (Index index : indexes) {
List<FieldSchema> columnSchemas = index.getSd().getCols();
for (FieldSchema column : columnSchemas) {
analyzer.allowColumnName(column.getName());
}
}
// partitioned columns are treated as if they have indexes so that the partitions
// are used during the index query generation
for (Partition part : queryPartitions) {
if (part.getSpec().isEmpty()) {
continue; // empty partitions are from whole tables, so we don't want to add them in
}
for (String column : part.getSpec().keySet()) {
analyzer.allowColumnName(column);
}
}
return analyzer;
}
@Override
public void analyzeIndexDefinition(Table baseTable, Index index,
Table indexTable) throws HiveException {
StorageDescriptor storageDesc = index.getSd();
if (this.usesIndexTable() && indexTable != null) {
StorageDescriptor indexTableSd = storageDesc.deepCopy();
List<FieldSchema> indexTblCols = indexTableSd.getCols();
FieldSchema bucketFileName = new FieldSchema("_bucketname", "string", "");
indexTblCols.add(bucketFileName);
FieldSchema offSets = new FieldSchema("_offset", "bigint", "");
indexTblCols.add(offSets);
FieldSchema bitmaps = new FieldSchema("_bitmaps", "array<bigint>", "");
indexTblCols.add(bitmaps);
indexTable.setSd(indexTableSd);
}
}
@Override
protected Task<?> getIndexBuilderMapRedTask(Set<ReadEntity> inputs, Set<WriteEntity> outputs,
List<FieldSchema> indexField, boolean partitioned,
PartitionDesc indexTblPartDesc, String indexTableName,
PartitionDesc baseTablePartDesc, String baseTableName, String dbName) throws HiveException {
HiveConf builderConf = new HiveConf(getConf(), BitmapIndexHandler.class);
HiveConf.setBoolVar(builderConf, HiveConf.ConfVars.HIVEROWOFFSET, true);
String indexCols = HiveUtils.getUnparsedColumnNamesFromFieldSchema(indexField);
//form a new insert overwrite query.
StringBuilder command= new StringBuilder();
LinkedHashMap<String, String> partSpec = indexTblPartDesc.getPartSpec();
command.append("INSERT OVERWRITE TABLE " +
HiveUtils.unparseIdentifier(dbName) + "." + HiveUtils.unparseIdentifier(indexTableName ));
if (partitioned && indexTblPartDesc != null) {
command.append(" PARTITION ( ");
List<String> ret = getPartKVPairStringArray(partSpec);
for (int i = 0; i < ret.size(); i++) {
String partKV = ret.get(i);
command.append(partKV);
if (i < ret.size() - 1) {
command.append(",");
}
}
command.append(" ) ");
}
command.append(" SELECT ");
command.append(indexCols);
command.append(",");
command.append(VirtualColumn.FILENAME.getName());
command.append(",");
command.append(VirtualColumn.BLOCKOFFSET.getName());
command.append(",");
command.append("EWAH_BITMAP(");
command.append(VirtualColumn.ROWOFFSET.getName());
command.append(")");
command.append(" FROM " +
HiveUtils.unparseIdentifier(dbName) + "." + HiveUtils.unparseIdentifier(baseTableName));
LinkedHashMap<String, String> basePartSpec = baseTablePartDesc.getPartSpec();
if(basePartSpec != null) {
command.append(" WHERE ");
List<String> pkv = getPartKVPairStringArray(basePartSpec);
for (int i = 0; i < pkv.size(); i++) {
String partKV = pkv.get(i);
command.append(partKV);
if (i < pkv.size() - 1) {
command.append(" AND ");
}
}
}
command.append(" GROUP BY ");
command.append(VirtualColumn.FILENAME.getName());
command.append(",");
command.append(VirtualColumn.BLOCKOFFSET.getName());
for (FieldSchema fieldSchema : indexField) {
command.append(",");
command.append(HiveUtils.unparseIdentifier(fieldSchema.getName()));
}
// Require clusterby ROWOFFSET if map-size aggregation is off.
// TODO: Make this work without map side aggregation
if (!builderConf.get("hive.map.aggr", null).equals("true")) {
throw new HiveException("Cannot construct index without map-side aggregation");
}
Task<?> rootTask = IndexUtils.createRootTask(builderConf, inputs, outputs,
command, partSpec, indexTableName, dbName);
return rootTask;
}
@Override
/**
* No lower bound on bitmap index query size, so this will always return true
*/
public boolean checkQuerySize(long querySize, HiveConf hiveConf) {
return true;
}
@Override
public boolean usesIndexTable() {
return true;
}
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.autoscaling.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Request object for the Auto Scaling DisableMetricsCollection operation.
 */
public class DisableMetricsCollectionRequest extends AmazonWebServiceRequest
        implements Serializable, Cloneable {

    /** The name or Amazon Resource Name (ARN) of the group. */
    private String autoScalingGroupName;

    /**
     * The metrics to disable. When this member is null/omitted, all metrics
     * are disabled. Valid values: {@code GroupMinSize}, {@code GroupMaxSize},
     * {@code GroupDesiredCapacity}, {@code GroupInServiceInstances},
     * {@code GroupPendingInstances}, {@code GroupStandbyInstances},
     * {@code GroupTerminatingInstances}, {@code GroupTotalInstances}.
     */
    private com.amazonaws.internal.SdkInternalList<String> metrics;

    /**
     * Sets the name or Amazon Resource Name (ARN) of the group.
     *
     * @param autoScalingGroupName
     *        The name or Amazon Resource Name (ARN) of the group.
     */
    public void setAutoScalingGroupName(String autoScalingGroupName) {
        this.autoScalingGroupName = autoScalingGroupName;
    }

    /**
     * @return The name or Amazon Resource Name (ARN) of the group.
     */
    public String getAutoScalingGroupName() {
        return this.autoScalingGroupName;
    }

    /**
     * Fluent variant of {@link #setAutoScalingGroupName(String)}.
     *
     * @param autoScalingGroupName
     *        The name or Amazon Resource Name (ARN) of the group.
     * @return this request, for method chaining.
     */
    public DisableMetricsCollectionRequest withAutoScalingGroupName(
            String autoScalingGroupName) {
        setAutoScalingGroupName(autoScalingGroupName);
        return this;
    }

    /**
     * Returns the metrics to disable (never null; lazily creates an empty
     * internal list on first access). An empty list means all metrics.
     *
     * @return the metrics to disable.
     */
    public java.util.List<String> getMetrics() {
        if (metrics == null) {
            metrics = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return metrics;
    }

    /**
     * Replaces the metrics to disable. Passing null clears the member so that
     * all metrics are disabled; otherwise the collection is copied.
     *
     * @param metrics
     *        The metrics to disable, or null for all metrics.
     */
    public void setMetrics(java.util.Collection<String> metrics) {
        this.metrics = (metrics == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<String>(metrics);
    }

    /**
     * Appends the given metrics to the existing list (if any). Use
     * {@link #setMetrics(java.util.Collection)} or
     * {@link #withMetrics(java.util.Collection)} to replace the list instead.
     *
     * @param metrics
     *        The metrics to disable.
     * @return this request, for method chaining.
     */
    public DisableMetricsCollectionRequest withMetrics(String... metrics) {
        if (this.metrics == null) {
            // Presize the backing list before appending.
            setMetrics(new com.amazonaws.internal.SdkInternalList<String>(
                    metrics.length));
        }
        for (String metric : metrics) {
            this.metrics.add(metric);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setMetrics(java.util.Collection)}; replaces
     * (does not append to) any previously set metrics.
     *
     * @param metrics
     *        The metrics to disable, or null for all metrics.
     * @return this request, for method chaining.
     */
    public DisableMetricsCollectionRequest withMetrics(
            java.util.Collection<String> metrics) {
        setMetrics(metrics);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getAutoScalingGroupName() != null) {
            sb.append("AutoScalingGroupName: " + getAutoScalingGroupName()
                    + ",");
        }
        if (getMetrics() != null) {
            sb.append("Metrics: " + getMetrics());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DisableMetricsCollectionRequest)) {
            return false;
        }
        DisableMetricsCollectionRequest that = (DisableMetricsCollectionRequest) obj;
        // Objects.equals reproduces the generated null-xor/equals pattern.
        return java.util.Objects.equals(getAutoScalingGroupName(),
                that.getAutoScalingGroupName())
                && java.util.Objects.equals(getMetrics(), that.getMetrics());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result
                + java.util.Objects.hashCode(getAutoScalingGroupName());
        result = prime * result + java.util.Objects.hashCode(getMetrics());
        return result;
    }

    @Override
    public DisableMetricsCollectionRequest clone() {
        return (DisableMetricsCollectionRequest) super.clone();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package freemarker.core;
import static freemarker.test.hamcerst.Matchers.*;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.TimeZone;
import org.junit.Test;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import freemarker.cache.CacheStorage;
import freemarker.cache.MruCacheStorage;
import freemarker.cache.TemplateLoader;
import freemarker.core.subpkg.PublicWithMixedConstructors;
import freemarker.ext.beans.BeansWrapper;
import freemarker.ext.jython.JythonWrapper;
import freemarker.template.Configuration;
import freemarker.template.DefaultObjectWrapper;
import freemarker.template.ObjectWrapper;
import freemarker.template.Template;
import freemarker.template.TemplateException;
import freemarker.template.TemplateExceptionHandler;
import freemarker.template.Version;
import freemarker.template.utility.WriteProtectable;
@SuppressWarnings("boxing")
public class ObjectBuilderSettingsTest {
@Test
public void newInstanceTest() throws Exception {
    // Each block feeds a different constructor-call syntax to the setting
    // evaluator. The asserted f/i/l/b values distinguish which TestBean1
    // constructor overload was picked (overloads defined elsewhere in this
    // class — outside this excerpt).
    {
        // Bare class name, no parentheses.
        TestBean1 res = (TestBean1) _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean1",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        assertEquals(4f, res.f, 0);
        assertFalse(res.b);
    }

    {
        // Empty argument list — same observable result as the bare name.
        TestBean1 res = (TestBean1) _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean1()",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        assertEquals(4f, res.f, 0);
        assertFalse(res.b);
    }

    {
        // Four positional arguments: float, int, a long beyond int range, boolean.
        TestBean1 res = (TestBean1) _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean1(1.5, -20, 8589934592, true)",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        assertEquals(1.5f, res.f, 0);
        assertEquals(Integer.valueOf(-20), res.i);
        assertEquals(8589934592l, res.l);
        assertTrue(res.b);
    }

    {
        // Two positional arguments (int, boolean).
        TestBean1 res = (TestBean1) _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean1(1, true)",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        assertEquals(2f, res.f, 0);
        assertEquals(Integer.valueOf(1), res.i);
        assertEquals(2l, res.l);
        assertTrue(res.b);
    }

    {
        // Two positional numeric arguments (int, long).
        TestBean1 res = (TestBean1) _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean1(11, 22)",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        assertEquals(3f, res.f, 0);
        assertEquals(Integer.valueOf(11), res.i);
        assertEquals(22l, res.l);
        assertFalse(res.b);
    }

    {
        // Named arguments only — set via the bean's p1..p4 properties.
        TestBean1 res = (TestBean1) _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean1(p1 = 1, p2 = 2, p3 = true, p4 = 's')",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        assertEquals(4f, res.f, 0);
        assertFalse(res.b);
        assertEquals(1d, res.getP1(), 0);
        assertEquals(2, res.getP2());
        assertTrue(res.isP3());
        assertEquals("s", res.getP4());
    }

    {
        // Mixed positional (including null) and named arguments, with null
        // also allowed as a named-argument value.
        TestBean1 res = (TestBean1) _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean1("
                + "null, 2, p1 = 1, p2 = 2, p3 = false, p4 = null)",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        assertNull(res.i);
        assertEquals(2, res.l, 0);
        assertEquals(3f, res.f, 0);
        assertFalse(res.b);
        assertEquals(1d, res.getP1(), 0);
        assertEquals(2, res.getP2());
        assertFalse(res.isP3());
        assertNull(res.getP4());
    }

    {
        // Deliberately odd spacings
        TestBean1 res = (TestBean1) _ObjectBuilderSettingEvaluator.eval(
                "\t\tfreemarker . core.\n\tObjectBuilderSettingsTest$TestBean1(\n\r\tp1=1\n,p2=2,p3=true,p4='s' )",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        assertEquals(4f, res.f, 0);
        assertFalse(res.b);
        assertEquals(1d, res.getP1(), 0);
        assertEquals(2, res.getP2());
        assertTrue(res.isP3());
        assertEquals("s", res.getP4());
    }

    {
        // Positional and named arguments combined in a single call.
        TestBean1 res = (TestBean1) _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean1(1, true, p2 = 2)",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        assertEquals(2f, res.f, 0);
        assertEquals(Integer.valueOf(1), res.i);
        assertEquals(2l, res.l);
        assertTrue(res.b);
        assertEquals(0d, res.getP1(), 0);
        assertEquals(2, res.getP2());
        assertFalse(res.isP3());
    }
}
@Test
public void builderTest() throws Exception {
    // Bare class name (no parentheses) is backward-compatible mode: the
    // bean's builder is not involved (built stays false).
    TestBean2 legacyModeBean = (TestBean2) _ObjectBuilderSettingEvaluator.eval(
            "freemarker.core.ObjectBuilderSettingsTest$TestBean2",
            Object.class, false, _SettingEvaluationEnvironment.getCurrent());
    assertFalse(legacyModeBean.built);
    assertEquals(0, legacyModeBean.x);

    // With "()" the builder is used even without arguments.
    TestBean2 builtNoArgs = (TestBean2) _ObjectBuilderSettingEvaluator.eval(
            "freemarker.core.ObjectBuilderSettingsTest$TestBean2()",
            Object.class, false, _SettingEvaluationEnvironment.getCurrent());
    assertTrue(builtNoArgs.built);
    assertEquals(0, builtNoArgs.x);

    // Named argument form.
    TestBean2 builtNamedArg = (TestBean2) _ObjectBuilderSettingEvaluator.eval(
            "freemarker.core.ObjectBuilderSettingsTest$TestBean2(x = 1)",
            Object.class, false, _SettingEvaluationEnvironment.getCurrent());
    assertTrue(builtNamedArg.built);
    assertEquals(1, builtNamedArg.x);

    // Positional argument form yields the same observable state.
    TestBean2 builtPositionalArg = (TestBean2) _ObjectBuilderSettingEvaluator.eval(
            "freemarker.core.ObjectBuilderSettingsTest$TestBean2(1)",
            Object.class, false, _SettingEvaluationEnvironment.getCurrent());
    assertTrue(builtPositionalArg.built);
    assertEquals(1, builtPositionalArg.x);
}
@Test
public void staticInstanceTest() throws Exception {
    // Backward-compatible mode (bare class name): a fresh instance, never
    // the shared static INSTANCE.
    TestBean5 legacyModeBean = (TestBean5) _ObjectBuilderSettingEvaluator.eval(
            "freemarker.core.ObjectBuilderSettingsTest$TestBean5",
            Object.class, false, _SettingEvaluationEnvironment.getCurrent());
    assertEquals(0, legacyModeBean.i);
    assertEquals(0, legacyModeBean.x);
    assertNotSame(TestBean5.INSTANCE, legacyModeBean);

    // The zero-argument "()" form resolves to the shared static INSTANCE.
    TestBean5 sharedInstance = (TestBean5) _ObjectBuilderSettingEvaluator.eval(
            "freemarker.core.ObjectBuilderSettingsTest$TestBean5()",
            Object.class, false, _SettingEvaluationEnvironment.getCurrent());
    assertEquals(0, sharedInstance.i);
    assertEquals(0, sharedInstance.x);
    assertSame(TestBean5.INSTANCE, sharedInstance); //!

    // Any argument — named or positional — forces a fresh instance again.
    TestBean5 namedArgBean = (TestBean5) _ObjectBuilderSettingEvaluator.eval(
            "freemarker.core.ObjectBuilderSettingsTest$TestBean5(x = 1)",
            Object.class, false, _SettingEvaluationEnvironment.getCurrent());
    assertEquals(0, namedArgBean.i);
    assertEquals(1, namedArgBean.x);
    assertNotSame(TestBean5.INSTANCE, namedArgBean);

    TestBean5 positionalArgBean = (TestBean5) _ObjectBuilderSettingEvaluator.eval(
            "freemarker.core.ObjectBuilderSettingsTest$TestBean5(1)",
            Object.class, false, _SettingEvaluationEnvironment.getCurrent());
    assertEquals(1, positionalArgBean.i);
    assertEquals(0, positionalArgBean.x);
    assertNotSame(TestBean5.INSTANCE, positionalArgBean);
}
@Test
public void writeProtectionTest() throws Exception {
    // A bean created via the "(...)" form comes back write protected:
    // further setter calls must fail.
    TestBean3 protectedBean = (TestBean3) _ObjectBuilderSettingEvaluator.eval(
            "freemarker.core.ObjectBuilderSettingsTest$TestBean3(x = 1)",
            Object.class, false, _SettingEvaluationEnvironment.getCurrent());
    assertEquals(1, protectedBean.x);
    assertTrue(protectedBean.isWriteProtected());
    try {
        protectedBean.setX(2);
        fail();
    } catch (IllegalStateException e) {
        // expected — the bean rejects modification after write protection
    }

    // Backward-compatible mode (bare class name): no write protection, so
    // the setter call below must succeed.
    TestBean3 writableBean = (TestBean3) _ObjectBuilderSettingEvaluator.eval(
            "freemarker.core.ObjectBuilderSettingsTest$TestBean3",
            Object.class, false, _SettingEvaluationEnvironment.getCurrent());
    assertEquals(0, writableBean.x);
    assertFalse(writableBean.isWriteProtected());
    writableBean.setX(2);
}
@Test
public void stringLiteralsTest() throws Exception {
    // Exercises the four string-literal syntaxes accepted by the evaluator:
    // double-quoted, single-quoted, and the raw (r-prefixed) variants of both.
    {
        // Empty strings in all four syntaxes.
        TestBean4 res = (TestBean4) _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean4(\"\", '', s3 = r\"\", s4 = r'')",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        assertEquals("", res.getS1());
        assertEquals("", res.getS2());
        assertEquals("", res.getS3());
        assertEquals("", res.getS4());
    }

    {
        // Plain single-character content.
        TestBean4 res = (TestBean4) _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean4(\"a\", 'b', s3 = r\"c\", s4 = r'd')",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        assertEquals("a", res.getS1());
        assertEquals("b", res.getS2());
        assertEquals("c", res.getS3());
        assertEquals("d", res.getS4());
    }

    {
        // The other quote character may appear unescaped inside a literal.
        TestBean4 res = (TestBean4) _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean4(\"a'A\", 'b\"B', s3 = r\"c'C\", s4 = r'd\"D')",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        assertEquals("a'A", res.getS1());
        assertEquals("b\"B", res.getS2());
        assertEquals("c'C", res.getS3());
        assertEquals("d\"D", res.getS4());
    }

    {
        // Backslash escapes are interpreted in normal literals (\n, \", \', \\)
        // but kept verbatim in raw (r-prefixed) literals.
        TestBean4 res = (TestBean4) _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean4("
                + "\"a\\nA\\\"a\\\\A\", 'a\\nA\\'a\\\\A', s3 = r\"a\\n\\A\", s4 = r'a\\n\\A')",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        assertEquals("a\nA\"a\\A", res.getS1());
        assertEquals("a\nA'a\\A", res.getS2());
        assertEquals("a\\n\\A", res.getS3());
        assertEquals("a\\n\\A", res.getS4());
    }
}
@Test
public void nestedBuilderTest() throws Exception {
    // Builder expressions may appear as arguments of other builder
    // expressions, both positionally and as named-argument values.
    {
        // One level of nesting: TestBean1/TestBean2 instances passed into
        // TestBean6 both positionally and via the named argument b3.
        TestBean6 res = (TestBean6) _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean6("
                + "freemarker.core.ObjectBuilderSettingsTest$TestBean1(11, 22, p4 = 'foo'),"
                + "1,"
                + "freemarker.core.ObjectBuilderSettingsTest$TestBean2(11),"
                + "y=2,"
                + "b3=freemarker.core.ObjectBuilderSettingsTest$TestBean2(x = 22)"
                + ")",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        assertEquals(Integer.valueOf(11), res.b1.i);
        assertEquals(22, res.b1.l);
        assertEquals("foo", res.b1.p4);
        assertEquals(1, res.x);
        assertEquals(11, res.b2.x);
        assertEquals(2, res.y);
        assertEquals(22, res.b3.x);
        assertNull(res.b4);
    }

    {
        // Two levels of nesting: a TestBean6 built inside another TestBean6
        // (named argument b4), with nulls for the outer positional arguments.
        TestBean6 res = (TestBean6) _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean6("
                + "null,"
                + "-1,"
                + "null,"
                + "b4=freemarker.core.ObjectBuilderSettingsTest$TestBean6("
                + "    freemarker.core.ObjectBuilderSettingsTest$TestBean1(11, 22, p4 = 'foo'),"
                + "    1,"
                + "    freemarker.core.ObjectBuilderSettingsTest$TestBean2(11),"
                + "    y=2,"
                + "    b3=freemarker.core.ObjectBuilderSettingsTest$TestBean2(x = 22)"
                + "),"
                + "y=2"
                + ")",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        // Outer bean: nulls and scalars arrived as given.
        assertNull(res.b1);
        assertEquals(-1, res.x);
        assertNull(res.b2);
        assertEquals(2, res.y);
        // Inner bean (res.b4): same expectations as the first block above.
        assertEquals(Integer.valueOf(11), res.b4.b1.i);
        assertEquals(22, res.b4.b1.l);
        assertEquals("foo", res.b4.b1.p4);
        assertEquals(1, res.b4.x);
        assertEquals(11, res.b4.b2.x);
        assertEquals(2, res.b4.y);
        assertEquals(22, res.b4.b3.x);
        assertNull(res.b4.b4);
    }
}
@Test
public void beansWrapperTest() throws Exception {
    // A short (unqualified) class name plus a version number as the first
    // positional argument; the remaining named arguments map to wrapper
    // properties.
    BeansWrapper wrapper = (BeansWrapper) _ObjectBuilderSettingEvaluator.eval(
            "BeansWrapper(2.3.21, simpleMapWrapper=true, exposeFields=true)",
            ObjectWrapper.class, false, _SettingEvaluationEnvironment.getCurrent());
    assertEquals(Configuration.VERSION_2_3_21, wrapper.getIncompatibleImprovements());
    assertTrue(wrapper.isSimpleMapWrapper());
    assertTrue(wrapper.isExposeFields());
}
@Test
public void defaultObjectWrapperTest() throws Exception {
    // Only the version is given; exposeFields must keep its default (false).
    DefaultObjectWrapper wrapper = (DefaultObjectWrapper) _ObjectBuilderSettingEvaluator.eval(
            "DefaultObjectWrapper(2.3.21)",
            ObjectWrapper.class, false, _SettingEvaluationEnvironment.getCurrent());
    assertEquals(Configuration.VERSION_2_3_21, wrapper.getIncompatibleImprovements());
    assertFalse(wrapper.isExposeFields());
}
@Test
public void jythonWrapperTest() throws Exception {
    // The zero-argument form resolves to the wrapper's shared singleton.
    JythonWrapper wrapper = (JythonWrapper) _ObjectBuilderSettingEvaluator.eval(
            "freemarker.ext.jython.JythonWrapper()",
            ObjectWrapper.class, false, _SettingEvaluationEnvironment.getCurrent());
    assertSame(JythonWrapper.INSTANCE, wrapper);
}
@Test
public void configurationPropertiesTest() throws TemplateException {
    // NOTE(review): the same Configuration instance is reused for every sub-block below,
    // so later blocks inherit settings applied by earlier ones (e.g. the "utf-8" default
    // encoding set in the first block is re-asserted in the second). Statement order matters.
    final Configuration cfg = new Configuration();
    {
        // Fully-qualified builder expressions for each pluggable component.
        Properties props = new Properties();
        props.setProperty(Configurable.OBJECT_WRAPPER_KEY, "freemarker.ext.beans.BeansWrapper(2.3.21)");
        props.setProperty(Configurable.ARITHMETIC_ENGINE_KEY,
                "freemarker.core.ObjectBuilderSettingsTest$DummyArithmeticEngine");
        props.setProperty(Configurable.TEMPLATE_EXCEPTION_HANDLER_KEY,
                "freemarker.core.ObjectBuilderSettingsTest$DummyTemplateExceptionHandler");
        props.setProperty(Configuration.CACHE_STORAGE_KEY,
                "freemarker.core.ObjectBuilderSettingsTest$DummyCacheStorage()");
        props.setProperty(Configurable.NEW_BUILTIN_CLASS_RESOLVER_KEY,
                "freemarker.core.ObjectBuilderSettingsTest$DummyNewBuiltinClassResolver()");
        props.setProperty(Configuration.DEFAULT_ENCODING_KEY, "utf-8");
        props.setProperty(Configuration.TEMPLATE_LOADER_KEY,
                "freemarker.core.ObjectBuilderSettingsTest$DummyTemplateLoader()");
        cfg.setSettings(props);
        assertEquals(BeansWrapper.class, cfg.getObjectWrapper().getClass());
        // A wrapper built via the settings mechanism from a builder expression is write-protected.
        assertTrue(((WriteProtectable) cfg.getObjectWrapper()).isWriteProtected());
        assertEquals(Configuration.VERSION_2_3_21, ((BeansWrapper) cfg.getObjectWrapper()).getIncompatibleImprovements());
        assertEquals(DummyArithmeticEngine.class, cfg.getArithmeticEngine().getClass());
        assertEquals(DummyTemplateExceptionHandler.class, cfg.getTemplateExceptionHandler().getClass());
        assertEquals(DummyCacheStorage.class, cfg.getCacheStorage().getClass());
        assertEquals(DummyNewBuiltinClassResolver.class, cfg.getNewBuiltinClassResolver().getClass());
        assertEquals(DummyTemplateLoader.class, cfg.getTemplateLoader().getClass());
        assertEquals("utf-8", cfg.getDefaultEncoding());
    }
    {
        // Predefined aliases ("defAult", "safer") and the special cache-storage
        // "soft: N, strong: M" syntax, plus builder expressions with named arguments.
        Properties props = new Properties();
        props.setProperty(Configurable.OBJECT_WRAPPER_KEY, "defAult");
        props.setProperty(Configurable.ARITHMETIC_ENGINE_KEY,
                "freemarker.core.ObjectBuilderSettingsTest$DummyArithmeticEngine(x = 1)");
        props.setProperty(Configurable.TEMPLATE_EXCEPTION_HANDLER_KEY,
                "freemarker.core.ObjectBuilderSettingsTest$DummyTemplateExceptionHandler(x = 1)");
        props.setProperty(Configuration.CACHE_STORAGE_KEY,
                "soft: 500, strong: 100");
        props.setProperty(Configurable.NEW_BUILTIN_CLASS_RESOLVER_KEY,
                "safer");
        cfg.setSettings(props);
        assertEquals(DefaultObjectWrapper.class, cfg.getObjectWrapper().getClass());
        // The "defAult" alias yields the shared default wrapper, which is not write-protected.
        assertFalse(((WriteProtectable) cfg.getObjectWrapper()).isWriteProtected());
        assertEquals(1, ((DummyArithmeticEngine) cfg.getArithmeticEngine()).getX());
        assertEquals(1, ((DummyTemplateExceptionHandler) cfg.getTemplateExceptionHandler()).getX());
        assertEquals(Configuration.VERSION_2_3_0, ((BeansWrapper) cfg.getObjectWrapper()).getIncompatibleImprovements());
        assertEquals(500, ((MruCacheStorage) cfg.getCacheStorage()).getSoftSizeLimit());
        assertEquals(TemplateClassResolver.SAFER_RESOLVER, cfg.getNewBuiltinClassResolver());
        // Inherited from the previous block; this setting was not overwritten here.
        assertEquals("utf-8", cfg.getDefaultEncoding());
    }
    {
        // Short aliases for the standard components.
        Properties props = new Properties();
        props.setProperty(Configurable.OBJECT_WRAPPER_KEY, "Beans");
        props.setProperty(Configurable.ARITHMETIC_ENGINE_KEY, "bigdecimal");
        props.setProperty(Configurable.TEMPLATE_EXCEPTION_HANDLER_KEY, "rethrow");
        cfg.setSettings(props);
        assertEquals(BeansWrapper.class, cfg.getObjectWrapper().getClass());
        assertSame(ArithmeticEngine.BIGDECIMAL_ENGINE, cfg.getArithmeticEngine());
        assertSame(TemplateExceptionHandler.RETHROW_HANDLER, cfg.getTemplateExceptionHandler());
        assertFalse(((WriteProtectable) cfg.getObjectWrapper()).isWriteProtected());
        assertEquals(Configuration.VERSION_2_3_0, ((BeansWrapper) cfg.getObjectWrapper()).getIncompatibleImprovements());
    }
    {
        // A bare fully-qualified class name (no parentheses) also works.
        Properties props = new Properties();
        props.setProperty(Configurable.OBJECT_WRAPPER_KEY, "freemarker.ext.beans.BeansWrapper");
        cfg.setSettings(props);
        assertEquals(BeansWrapper.class, cfg.getObjectWrapper().getClass());
        assertFalse(((WriteProtectable) cfg.getObjectWrapper()).isWriteProtected());
        assertEquals(Configuration.VERSION_2_3_0, ((BeansWrapper) cfg.getObjectWrapper()).getIncompatibleImprovements());
    }
    {
        // A builder expression with an explicit (pre-2.3.21) version; the effective
        // incompatible-improvements still normalizes to 2.3.0 here.
        Properties props = new Properties();
        props.setProperty(Configurable.OBJECT_WRAPPER_KEY, "DefaultObjectWrapper(2.3.19)");
        cfg.setSettings(props);
        assertEquals(DefaultObjectWrapper.class, cfg.getObjectWrapper().getClass());
        assertTrue(((WriteProtectable) cfg.getObjectWrapper()).isWriteProtected());
        assertEquals(Configuration.VERSION_2_3_0, ((BeansWrapper) cfg.getObjectWrapper()).getIncompatibleImprovements());
    }
}
@Test
public void timeZoneTest() throws _ObjectBuilderSettingEvaluationException, ClassNotFoundException, InstantiationException, IllegalAccessException {
    // Valid zone IDs must round-trip through the TimeZone(...) builder argument.
    final String[] zoneIds = { "GMT+01", "GMT", "UTC" };
    for (int idx = 0; idx < zoneIds.length; idx++) {
        final String zoneId = zoneIds[idx];
        final Object evaled = _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean8(timeZone=TimeZone('" + zoneId + "'))",
                TestBean8.class, false, new _SettingEvaluationEnvironment());
        assertEquals(TimeZone.getTimeZone(zoneId), ((TestBean8) evaled).getTimeZone());
    }
    // An unknown zone ID must be rejected, and the cause must name the bad ID.
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean8(timeZone=TimeZone('foobar'))",
                TestBean8.class, false, new _SettingEvaluationEnvironment());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getCause().getMessage(),
                allOf(containsStringIgnoringCase("unrecognized"), containsString("foobar")));
    }
}
@Test
public void configureBeanTest() throws Exception {
    // configureBean() fills an existing bean from the "name=value, ..." argument list
    // and returns the position just after the closing ")".
    final TestBean7 target = new TestBean7();
    final String expression = "a/b(s='foo', x=1, b=true), bar";
    final int afterArgsPos = _ObjectBuilderSettingEvaluator.configureBean(
            expression, expression.indexOf('(') + 1, target,
            _SettingEvaluationEnvironment.getCurrent());
    assertTrue(target.isB());
    assertEquals(1, target.getX());
    assertEquals("foo", target.getS());
    // Everything after the closing ")" must be left unconsumed for the caller.
    assertEquals(", bar", expression.substring(afterArgsPos));
}
@Test
public void parsingErrors() throws Exception {
    // Each case feeds a syntactically broken builder expression to the evaluator and
    // checks that the error message pinpoints the offending token.
    // Empty positional argument slot ("1,,2"):
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean1(1,,2)",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsString("\",\""));
    }
    // Positional argument after a named one is not allowed:
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean1(x=1,2)",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsStringIgnoringCase("must precede named"));
    }
    // Illegal separator character ";":
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean1(x=1;2)",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsString("\";\""));
    }
    // Unbalanced extra closing parenthesis:
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean1(1,2))",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsString("\")\""));
    }
    // FreeMarker interpolation syntax is rejected inside string literals:
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "foo.Bar('s${x}s'))",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsString("${...}"));
    }
    // Legacy #{...} interpolation syntax is rejected too:
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "foo.Bar('s#{x}s'))",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsString("#{...}"));
    }
}
@Test
public void semanticErrors() throws Exception {
    // Each case is syntactically valid but semantically wrong; the evaluator must
    // fail with a message describing the semantic problem.
    // Nonexistent class:
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$XTestBean1(1,2)",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsStringIgnoringCase("Failed to get class"));
    }
    // No constructor overload matches (boolean, int):
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean1(true, 2)",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsStringIgnoringCase("constructor"));
    }
    // Named argument without a matching writable JavaBeans property:
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean1(x = 1)",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsStringIgnoringCase("no writeable JavaBeans property called \"x\""));
    }
    // The same named argument given twice:
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.ObjectBuilderSettingsTest$TestBean1(p1 = 1, p1 = 2)",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsString("twice"));
    }
    // Result is not an instance of the required type (ObjectWrapper):
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "java.util.HashMap()",
                ObjectWrapper.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsString("is not a(n) " + ObjectWrapper.class.getName()));
    }
    // "null" is rejected when the 3rd (allow-null/literal) argument is false:
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "null",
                ObjectWrapper.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsString("can't be null"));
    }
}
@Test
public void testLiteralAsObjectBuilder() throws Exception {
    // With the 3rd argument true, plain literals (not just builder expressions)
    // are accepted — presumably this flag enables literal/null results; the cases
    // below cover null, string, boolean, decimal, and version literals.
    final Object nullResult = _ObjectBuilderSettingEvaluator.eval(
            "null",
            ObjectWrapper.class, true, _SettingEvaluationEnvironment.getCurrent());
    assertNull(nullResult);

    final Object stringResult = _ObjectBuilderSettingEvaluator.eval(
            "'foo'",
            CharSequence.class, true, _SettingEvaluationEnvironment.getCurrent());
    assertEquals("foo", stringResult);

    final Object booleanResult = _ObjectBuilderSettingEvaluator.eval(
            " true ",
            Object.class, true, _SettingEvaluationEnvironment.getCurrent());
    assertEquals(Boolean.TRUE, booleanResult);

    final Object decimalResult = _ObjectBuilderSettingEvaluator.eval(
            "1.23 ",
            Number.class, true, _SettingEvaluationEnvironment.getCurrent());
    assertEquals(new BigDecimal("1.23"), decimalResult);

    // Three dot-separated numbers parse as a Version, not a number.
    final Object versionResult = _ObjectBuilderSettingEvaluator.eval(
            " 1.2.3",
            Object.class, true, _SettingEvaluationEnvironment.getCurrent());
    assertEquals(new Version(1, 2, 3), versionResult);
}
@Test
public void testNumberLiteralJavaTypes() throws Exception {
    // Integer literals must map to the smallest fitting standard type — Integer,
    // then Long, then BigInteger — while decimal literals always become BigDecimal.
    // Table-driven: literal text -> expected boxed value (insertion order preserved).
    final Map<String, Number> expectedByLiteral = new LinkedHashMap<String, Number>();
    expectedByLiteral.put("1.0", new BigDecimal("1.0"));
    expectedByLiteral.put("-9223372036854775809", new BigInteger("-9223372036854775809"));
    expectedByLiteral.put("9223372036854775808", new BigInteger("9223372036854775808"));
    expectedByLiteral.put("-9223372036854775808", Long.valueOf(-9223372036854775808L));
    expectedByLiteral.put("9223372036854775807", Long.valueOf(9223372036854775807L));
    expectedByLiteral.put("-2147483648", Integer.valueOf(-2147483648));
    expectedByLiteral.put("2147483647", Integer.valueOf(2147483647));
    expectedByLiteral.put("-1", Integer.valueOf(-1));
    expectedByLiteral.put("1", Integer.valueOf(1));
    for (Map.Entry<String, Number> entry : expectedByLiteral.entrySet()) {
        assertEquals(entry.getValue(), _ObjectBuilderSettingEvaluator.eval(
                entry.getKey(),
                Number.class, true, _SettingEvaluationEnvironment.getCurrent()));
    }
}
@Test
public void testListLiterals() throws Exception {
    {
        // A [..] literal may mix strings, null, booleans, builder expressions, and
        // nested lists; whitespace between tokens must be irrelevant.
        ArrayList<Object> expected = new ArrayList();
        expected.add("s");
        expected.add(null);
        expected.add(true);
        expected.add(new TestBean9(1));
        expected.add(ImmutableList.of(11, 22, 33));
        assertEquals(expected, _ObjectBuilderSettingEvaluator.eval(
                "['s', null, true, freemarker.core.ObjectBuilderSettingsTest$TestBean9(1), [11, 22, 33]]",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent()));
        // Same literal, maximal whitespace; also checks assignability to Collection.
        assertEquals(expected, _ObjectBuilderSettingEvaluator.eval(
                " [ 's' , null , true , freemarker.core.ObjectBuilderSettingsTest$TestBean9(1) ,"
                + " [ 11 , 22 , 33 ] ] ",
                Collection.class, false, _SettingEvaluationEnvironment.getCurrent()));
        // Same literal, no whitespace; also checks assignability to List.
        assertEquals(expected, _ObjectBuilderSettingEvaluator.eval(
                "['s',null,true,freemarker.core.ObjectBuilderSettingsTest$TestBean9(1),[11,22,33]]",
                List.class, false, _SettingEvaluationEnvironment.getCurrent()));
    }
    // Empty and single-element lists, with and without internal whitespace:
    assertEquals(Collections.emptyList(), _ObjectBuilderSettingEvaluator.eval(
            "[]",
            Object.class, false, _SettingEvaluationEnvironment.getCurrent()));
    assertEquals(Collections.emptyList(), _ObjectBuilderSettingEvaluator.eval(
            "[ ]",
            Object.class, false, _SettingEvaluationEnvironment.getCurrent()));
    assertEquals(Collections.singletonList(123), _ObjectBuilderSettingEvaluator.eval(
            "[123]",
            Object.class, false, _SettingEvaluationEnvironment.getCurrent()));
    assertEquals(Collections.singletonList(123), _ObjectBuilderSettingEvaluator.eval(
            "[ 123 ]",
            Object.class, false, _SettingEvaluationEnvironment.getCurrent()));
    // A list literal can be a constructor argument too:
    assertEquals(new TestBean9(1, ImmutableList.of("a", "b")), _ObjectBuilderSettingEvaluator.eval(
            "freemarker.core.ObjectBuilderSettingsTest$TestBean9(1, ['a', 'b'])",
            Object.class, false, _SettingEvaluationEnvironment.getCurrent()));
    // Malformed list literals; error messages must point at the unexpected character.
    // Trailing comma:
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "[1,]",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsString("found character \"]\""));
    }
    // Leading comma:
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "[,1]",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsString("found character \",\""));
    }
    // Closing bracket without an opening one:
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "1]",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsString("found character \"]\""));
    }
    // Unterminated list:
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "[1",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsString("end of"));
    }
}
@Test
public void testMapLiterals() throws Exception {
    {
        // A {..} literal with string keys may hold strings, null, booleans, builder
        // expressions, and nested list literals; whitespace must be irrelevant.
        HashMap<String, Object> expected = new HashMap();
        expected.put("k1", "s");
        expected.put("k2", null);
        expected.put("k3", true);
        expected.put("k4", new TestBean9(1));
        expected.put("k5", ImmutableList.of(11, 22, 33));
        assertEquals(expected, _ObjectBuilderSettingEvaluator.eval(
                "{'k1': 's', 'k2': null, 'k3': true, "
                + "'k4': freemarker.core.ObjectBuilderSettingsTest$TestBean9(1), 'k5': [11, 22, 33]}",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent()));
        // Same literal, maximal whitespace; also checks assignability to Map.
        assertEquals(expected, _ObjectBuilderSettingEvaluator.eval(
                " { 'k1' : 's' , 'k2' : null , 'k3' : true , "
                + "'k4' : freemarker.core.ObjectBuilderSettingsTest$TestBean9 ( 1 ) , 'k5' : [ 11 , 22 , 33 ] } ",
                Map.class, false, _SettingEvaluationEnvironment.getCurrent()));
        // Same literal, no whitespace; also checks assignability to LinkedHashMap.
        assertEquals(expected, _ObjectBuilderSettingEvaluator.eval(
                " {'k1':'s','k2':null,'k3':true,"
                + "'k4':freemarker.core.ObjectBuilderSettingsTest$TestBean9(1),'k5':[11,22,33]}",
                LinkedHashMap.class, false, _SettingEvaluationEnvironment.getCurrent()));
    }
    {
        // Keys need not be strings: booleans, numbers, builder results, and lists work too.
        HashMap<Object, String> expected = new HashMap();
        expected.put(true, "T");
        expected.put(1, "O");
        expected.put(new TestBean9(1), "B");
        expected.put(ImmutableList.of(11, 22, 33), "L");
        assertEquals(expected, _ObjectBuilderSettingEvaluator.eval(
                "{ true: 'T', 1: 'O', freemarker.core.ObjectBuilderSettingsTest$TestBean9(1): 'B', "
                + "[11, 22, 33]: 'L' }",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent()));
    }
    // Empty and single-entry maps, with and without internal whitespace:
    assertEquals(Collections.emptyMap(), _ObjectBuilderSettingEvaluator.eval(
            "{}",
            Object.class, false, _SettingEvaluationEnvironment.getCurrent()));
    assertEquals(Collections.emptyMap(), _ObjectBuilderSettingEvaluator.eval(
            "{ }",
            Object.class, false, _SettingEvaluationEnvironment.getCurrent()));
    assertEquals(Collections.singletonMap("k1", 123), _ObjectBuilderSettingEvaluator.eval(
            "{'k1':123}",
            Object.class, false, _SettingEvaluationEnvironment.getCurrent()));
    assertEquals(Collections.singletonMap("k1", 123), _ObjectBuilderSettingEvaluator.eval(
            "{ 'k1' : 123 }",
            Object.class, false, _SettingEvaluationEnvironment.getCurrent()));
    // A map literal can be a constructor argument too:
    assertEquals(new TestBean9(1, ImmutableMap.of(11, "a", 22, "b")), _ObjectBuilderSettingEvaluator.eval(
            "freemarker.core.ObjectBuilderSettingsTest$TestBean9(1, { 11: 'a', 22: 'b' })",
            Object.class, false, _SettingEvaluationEnvironment.getCurrent()));
    // Malformed map literals; error messages must point at the unexpected character.
    // Trailing comma:
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "{1:2,}",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsString("found character \"}\""));
    }
    // Leading comma:
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "{,1:2}",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsString("found character \",\""));
    }
    // Key-value pair without an opening brace:
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "1:2}",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsString("found character \":\""));
    }
    // Closing brace without an opening one:
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "1}",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsString("found character \"}\""));
    }
    // Unterminated map (after the key):
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "{1",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsString("end of"));
    }
    // Unterminated map (after the colon):
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "{1:",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsString("end of"));
    }
    // null is forbidden as a map key:
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "{null:1}",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertThat(e.getMessage(), containsString("null as key"));
    }
}
@Test
public void visibilityTest() throws Exception {
    // The evaluator instantiates classes reflectively, so Java access rules apply:
    // package-visible classes/constructors in another package must fail with
    // IllegalAccessException as the cause, while public ones must succeed.
    // Package-visible class, package-visible constructor:
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.subpkg.PackageVisibleAll()",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertEquals(IllegalAccessException.class, e.getCause().getClass());
    }
    // Package-visible class even with a public constructor:
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.subpkg.PackageVisibleWithPublicConstructor()",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertEquals(IllegalAccessException.class, e.getCause().getClass());
    }
    // Public class but package-visible constructor:
    try {
        _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.subpkg.PublicWithPackageVisibleConstructor()",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        fail();
    } catch (_ObjectBuilderSettingEvaluationException e) {
        assertEquals(IllegalAccessException.class, e.getCause().getClass());
    }
    {
        // Fully public class and constructor: instantiation succeeds.
        Object o = _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.subpkg.PublicAll()",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        assertEquals(freemarker.core.subpkg.PublicAll.class, o.getClass());
    }
    {
        // Among mixed-visibility overloads, the accessible (Integer) one must be chosen.
        Object o = _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.subpkg.PublicWithMixedConstructors(1)",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        assertEquals("Integer", ((PublicWithMixedConstructors) o).getS());
    }
    {
        // A package-visible class is reachable through its public Builder companion.
        Object o = _ObjectBuilderSettingEvaluator.eval(
                "freemarker.core.subpkg.PackageVisibleAllWithBuilder()",
                Object.class, false, _SettingEvaluationEnvironment.getCurrent());
        assertEquals("freemarker.core.subpkg.PackageVisibleAllWithBuilder", o.getClass().getName());
    }
}
public static class TestBean1 {
    // Package-visible on purpose: the tests read these fields directly.
    // "f" doubles as a marker of which constructor overload was selected
    // (2, 3, or 4) when not given explicitly.
    float f;
    Integer i;
    long l;
    boolean b;
    // JavaBeans properties, settable through name=value builder arguments:
    double p1;
    int p2;
    boolean p3;
    String p4;

    /** Canonical constructor; the other overloads delegate here. */
    public TestBean1(float f, Integer i, long l, boolean b) {
        this.f = f;
        this.i = i;
        this.l = l;
        this.b = b;
    }

    /** Two-arg overload: marks itself with f=2 and l=2. */
    public TestBean1(Integer i, boolean b) {
        this(2, i, 2, b);
    }

    /** Two-arg overload: marks itself with f=3 and b=false. */
    public TestBean1(Integer i, long l) {
        this(3, i, l, false);
    }

    /** No-arg overload: marks itself with f=4; everything else stays at defaults. */
    public TestBean1() {
        this(4, null, 0, false);
    }

    public double getP1() {
        return this.p1;
    }

    public void setP1(double p1) {
        this.p1 = p1;
    }

    public int getP2() {
        return this.p2;
    }

    public void setP2(int p2) {
        this.p2 = p2;
    }

    public boolean isP3() {
        return this.p3;
    }

    public void setP3(boolean p3) {
        this.p3 = p3;
    }

    public String getP4() {
        return this.p4;
    }

    public void setP4(String p4) {
        this.p4 = p4;
    }
}
public static class TestBean2 {
final boolean built;
final int x;
public TestBean2() {
this.built = false;
this.x = 0;
}
public TestBean2(int x) {
this.built = false;
this.x = x;
}
public TestBean2(TestBean2Builder builder) {
this.built = true;
this.x = builder.x;
}
}
public static class TestBean2Builder {
    int x;

    public TestBean2Builder() {
        this(0);
    }

    public TestBean2Builder(int x) {
        this.x = x;
    }

    public int getX() {
        return this.x;
    }

    public void setX(int x) {
        this.x = x;
    }

    /** Finishing step: produces the actual TestBean2 from this builder's state. */
    public TestBean2 build() {
        return new TestBean2(this);
    }
}
public static class TestBean3 implements WriteProtectable {
    private boolean writeProtected;
    private int x;

    /** One-way switch: once protected, the bean can never be unprotected. */
    public void writeProtect() {
        this.writeProtected = true;
    }

    public boolean isWriteProtected() {
        return this.writeProtected;
    }

    public int getX() {
        return this.x;
    }

    public void setX(int x) {
        // Mutation after write-protection must fail loudly.
        if (this.writeProtected) {
            throw new IllegalStateException();
        }
        this.x = x;
    }
}
public static class TestBean4 {
    // s1/s2 are constructor-only (read-only properties); s3/s4 are writable.
    private final String s1, s2;
    private String s3, s4;

    public TestBean4(String s1, String s2) {
        this.s1 = s1;
        this.s2 = s2;
    }

    public String getS1() {
        return this.s1;
    }

    public String getS2() {
        return this.s2;
    }

    public String getS3() {
        return this.s3;
    }

    public void setS3(String s3) {
        this.s3 = s3;
    }

    public String getS4() {
        return this.s4;
    }

    public void setS4(String s4) {
        this.s4 = s4;
    }
}
public static class TestBean5 {
    // Shared singleton; lets tests check that an INSTANCE field is picked up.
    public final static TestBean5 INSTANCE = new TestBean5();

    private final int i;
    private int x;

    public TestBean5() {
        this(0);
    }

    public TestBean5(int i) {
        this.i = i;
    }

    public int getI() {
        return this.i;
    }

    public int getX() {
        return this.x;
    }

    public void setX(int x) {
        this.x = x;
    }
}
public static class TestBean6 {
    // b1/b2 are constructor-only; x, y, b3, b4 are writable JavaBeans properties.
    private final TestBean1 b1;
    private final TestBean2 b2;
    private int x;
    private int y;
    private TestBean2 b3;
    private TestBean6 b4;

    public TestBean6(TestBean1 b1, int x, TestBean2 b2) {
        this.b1 = b1;
        this.x = x;
        this.b2 = b2;
    }

    public TestBean1 getB1() {
        return this.b1;
    }

    public TestBean2 getB2() {
        return this.b2;
    }

    public int getX() {
        return this.x;
    }

    public void setX(int x) {
        this.x = x;
    }

    public int getY() {
        return this.y;
    }

    public void setY(int y) {
        this.y = y;
    }

    public TestBean2 getB3() {
        return this.b3;
    }

    public void setB3(TestBean2 b3) {
        this.b3 = b3;
    }

    // b4 allows nesting a whole TestBean6 built by a nested builder expression.
    public TestBean6 getB4() {
        return this.b4;
    }

    public void setB4(TestBean6 b4) {
        this.b4 = b4;
    }
}
public class TestBean7 {
    // NOTE(review): deliberately a non-static inner class, unlike its siblings —
    // keeping it that way, since configureBeanTest only mutates an existing instance.
    private String s;
    private int x;
    private boolean b;

    public String getS() {
        return this.s;
    }

    public void setS(String s) {
        this.s = s;
    }

    public int getX() {
        return this.x;
    }

    public void setX(int x) {
        this.x = x;
    }

    public boolean isB() {
        return this.b;
    }

    public void setB(boolean b) {
        this.b = b;
    }

    @Override
    public String toString() {
        // Same output as the concatenated form: %s prints "null" for a null s.
        return String.format("TestBean [s=%s, x=%d, b=%b]", s, x, b);
    }
}
public static class TestBean8 {
    // All three are writable JavaBeans properties targeted by named builder arguments.
    private TimeZone timeZone;
    private Object anyObject;
    private List<?> list;

    public TimeZone getTimeZone() {
        return this.timeZone;
    }

    public void setTimeZone(TimeZone timeZone) {
        this.timeZone = timeZone;
    }

    public Object getAnyObject() {
        return this.anyObject;
    }

    public void setAnyObject(Object anyObject) {
        this.anyObject = anyObject;
    }

    public List<?> getList() {
        return this.list;
    }

    public void setList(List<?> list) {
        this.list = list;
    }
}
public static class TestBean9 {
    // Immutable value object; equals/hashCode make it usable as an assertEquals
    // expectation and as a HashMap key in the tests.
    private final int n;
    private final List<?> list;
    private final Map<?, ?> map;

    public TestBean9(int n) {
        this(n, null, null);
    }

    public TestBean9(int n, List<?> list) {
        this(n, list, null);
    }

    public TestBean9(int n, Map<?, ?> map) {
        this(n, null, map);
    }

    public TestBean9(int n, List<?> list, Map<?, ?> map) {
        this.n = n;
        this.list = list;
        this.map = map;
    }

    @Override
    public int hashCode() {
        // Same 31-based mix as the conventional generated form.
        int result = 1;
        result = 31 * result + (list == null ? 0 : list.hashCode());
        result = 31 * result + (map == null ? 0 : map.hashCode());
        return 31 * result + n;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        // Exact-class comparison (not instanceof), matching the hashCode contract.
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        TestBean9 that = (TestBean9) obj;
        if (n != that.n) {
            return false;
        }
        if (list == null ? that.list != null : !list.equals(that.list)) {
            return false;
        }
        return map == null ? that.map == null : map.equals(that.map);
    }
}
// No-op ArithmeticEngine stub: exists only so the settings tests can verify that
// the class is instantiated and that the "x" property is assigned; the arithmetic
// methods deliberately return 0/null and are never meaningfully invoked.
public static class DummyArithmeticEngine extends ArithmeticEngine {
    private int x;

    @Override
    public int compareNumbers(Number first, Number second) throws TemplateException {
        return 0;
    }

    @Override
    public Number add(Number first, Number second) throws TemplateException {
        return null;
    }

    @Override
    public Number subtract(Number first, Number second) throws TemplateException {
        return null;
    }

    @Override
    public Number multiply(Number first, Number second) throws TemplateException {
        return null;
    }

    @Override
    public Number divide(Number first, Number second) throws TemplateException {
        return null;
    }

    @Override
    public Number modulus(Number first, Number second) throws TemplateException {
        return null;
    }

    @Override
    public Number toNumber(String s) {
        return null;
    }

    // "x" is the JavaBeans property assigned via the builder expression in the tests.
    public int getX() {
        return x;
    }

    public void setX(int x) {
        this.x = x;
    }
}
// Stub handler that silently ignores template exceptions; used only to verify
// instantiation via settings and the assignment of the "x" property.
public static class DummyTemplateExceptionHandler implements TemplateExceptionHandler {
    private int x;

    // Intentionally empty: the tests never exercise actual error handling.
    public void handleTemplateException(TemplateException te, Environment env, Writer out) throws TemplateException {
    }

    public int getX() {
        return x;
    }

    public void setX(int x) {
        this.x = x;
    }
}
// Stub cache that stores nothing; only its concrete class matters to the tests.
public static class DummyCacheStorage implements CacheStorage {
    public Object get(Object key) {
        return null;
    }

    public void put(Object key, Object value) {
    }

    public void remove(Object key) {
    }

    public void clear() {
    }
}
// Stub resolver that never resolves anything; only its concrete class matters.
public static class DummyNewBuiltinClassResolver implements TemplateClassResolver {
    public Class resolve(String className, Environment env, Template template) throws TemplateException {
        return null;
    }
}
// Stub template loader that never finds a template; only its concrete class matters.
public static class DummyTemplateLoader implements TemplateLoader {
    public Object findTemplateSource(String name) throws IOException {
        return null;
    }

    public long getLastModified(Object templateSource) {
        return 0;
    }

    public Reader getReader(Object templateSource, String encoding) throws IOException {
        return null;
    }

    public void closeTemplateSource(Object templateSource) throws IOException {
    }
}
}
| |
/*
Derby - Class com.pivotal.gemfirexd.jdbc.ClientDRDADriver
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/*
* Changes for GemFireXD distributed data platform (some marked by "GemStone changes")
*
* Portions Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.jdbc;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.sql.SQLException;
import com.pivotal.gemfirexd.internal.client.am.SqlException;
import com.pivotal.gemfirexd.internal.client.am.ClientJDBCObjectFactory;
import com.pivotal.gemfirexd.internal.client.am.Utils;
import com.pivotal.gemfirexd.internal.jdbc.ClientDataSource;
import com.pivotal.gemfirexd.internal.shared.common.SharedUtils;
import com.pivotal.gemfirexd.internal.shared.common.error.ClientExceptionUtil;
// GemStone changes BEGIN
/* (original code)
import com.pivotal.gemfirexd.internal.client.net.ClientJDBCObjectFactoryImpl;
import com.pivotal.gemfirexd.internal.shared.common.reference.Attribute;
*/
// GemStone changes END
import com.pivotal.gemfirexd.internal.shared.common.reference.MessageId;
import com.pivotal.gemfirexd.internal.shared.common.reference.SQLState;
import com.pivotal.gemfirexd.thrift.internal.ClientConfiguration;
public class ClientDRDADriver implements java.sql.Driver {
// Counter used to generate distinct trace-file name suffixes per connection.
private transient int traceFileSuffixIndex_ = 0;

// support for older jdbc:sqlfire:// URL scheme has now been dropped
protected final static String URL_PREFIX = "jdbc:gemfirexd:";
// Host/port part of the URL: either "host:port" or "host[port]", optionally
// followed by "/gemfirexd;" and a ";"-separated attribute list.
protected final static String URL_SUFFIX_REGEX =
    "//(([^:]+:[0-9]+)|([^\\[]+\\[[0-9]+\\]))(/(gemfirexd;)?;?(.*)?)?";

private final static String DRDA_SUBPROTOCOL = "drda:";
// Loose match: URL merely claims the DRDA sub-protocol ("jdbc:gemfirexd:drda://...").
private final static Pattern DRDA_PROTOCOL_PATTERN = Pattern.compile(
    URL_PREFIX + DRDA_SUBPROTOCOL + "//.*", Pattern.CASE_INSENSITIVE);
// Strict match: full DRDA URL shape, including server/port and attributes.
private final static Pattern DRDA_URL_PATTERN = Pattern.compile(URL_PREFIX
    + DRDA_SUBPROTOCOL + URL_SUFFIX_REGEX, Pattern.CASE_INSENSITIVE);

private static ClientJDBCObjectFactory factoryObject = null;

// First SQLException raised while loading/registering the driver (if any);
// stored here so it can be rethrown later instead of failing class loading.
static protected SQLException exceptionsOnLoadDriver__ = null;

// Keep track of the registered driver so that we can de-register it
// if we're a stored proc.
static protected ClientDRDADriver registeredDriver__ = null;
static {
    // Register a singleton driver instance with the DriverManager at class-load
    // time; failures are deferred into exceptionsOnLoadDriver__ rather than thrown.
    try {
        registeredDriver__ = new ClientDRDADriver();
        java.sql.DriverManager.registerDriver(registeredDriver__);
    } catch (java.sql.SQLException e) {
        // A null log writer is passed, because jdbc 1 sql exceptions are
        // automatically traced
        exceptionsOnLoadDriver__ = ClientExceptionUtil.newSQLException(
            SQLState.JDBC_DRIVER_REGISTER, e);
    }
    // This may possibly hit the race-condition bug of java 1.1.
    // The Configuration static clause should execute before the following line
    // does.
    if (ClientConfiguration.exceptionsOnLoadResources != null) {
        // Fold any resource-loading failures into the single deferred exception chain.
        exceptionsOnLoadDriver__ = Utils.accumulateSQLException(
            ClientConfiguration.exceptionsOnLoadResources,
            exceptionsOnLoadDriver__);
    }
}
/** Public no-arg constructor, as required for java.sql.DriverManager registration. */
public ClientDRDADriver() {
}
/**
 * {@inheritDoc}
 *
 * Validates the URL against this driver's pattern, extracts server, port
 * and attributes, and opens a new network connection. Returns null for
 * URLs that belong to a different driver, per the JDBC contract.
 */
public java.sql.Connection connect(String url,
java.util.Properties properties) throws java.sql.SQLException {
// Per the java.sql.Driver contract, silently decline URLs that are not
// ours so DriverManager can try other registered drivers.
if (!acceptsURL(url)) {
return null;
}
com.pivotal.gemfirexd.internal.client.net.NetConnection conn;
try {
// Surface any failure captured while this driver class was loading.
if (exceptionsOnLoadDriver__ != null) {
throw exceptionsOnLoadDriver__;
}
if (properties == null) {
properties = new java.util.Properties();
}
/*
java.util.StringTokenizer urlTokenizer =
new java.util.StringTokenizer(url, "/:[]= \t\n\r\f", true);
int protocol = tokenizeProtocol(url, urlTokenizer);
if (protocol == 0) {
return null; // unrecognized database URL prefix.
}
String slashOrNull = null;
if (protocol == THRIFT_REMOTE_PROTOCOL ||
protocol == DERBY_REMOTE_PROTOCOL) {
try {
slashOrNull = urlTokenizer.nextToken(":/");
} catch (java.util.NoSuchElementException e) {
// A null log writer is passed, because jdbc 1 sqlexceptions are automatically traced
throw new SqlException(null,
new ClientMessageId(SQLState.MALFORMED_URL),
url, e);
}
}
String server = tokenizeServerName(urlTokenizer, url); // "/server"
int port = tokenizeOptionalPortNumber(urlTokenizer, url); // "[:port]/"
if (port == 0) {
port = ClientDataSource.propertyDefault_portNumber;
}
// database is the database name and attributes. This will be
// sent to network server as the databaseName
String database = tokenizeDatabase(urlTokenizer, url); // "database"
java.util.Properties augmentedProperties = tokenizeURLProperties(url, properties);
// GemStone changes BEGIN
// GemFireXD has no DB or DB properties in the name
/* (original code)
database = appendDatabaseAttributes(database,augmentedProperties);
*/
// Regex-based parsing replaces the tokenizer approach kept (commented
// out) above for reference.
ClientExceptionUtil.init();
Matcher m = matchURL(url);
if (!m.matches()) {
throw ClientExceptionUtil.newSQLException(
SQLState.MALFORMED_URL, null, url);
}
// Subclasses may elect the thrift wire protocol based on the match.
final boolean thriftProtocol = useThriftProtocol(m);
// getServer() writes the resolved port number into this 1-element array.
int[] port = new int[1];
String server = getServer(m, port);
// we already verified that there is a valid database name,
// if any, in the URL, so just pass empty database name
String database = "";
java.util.Properties augmentedProperties = getURLProperties(m,
properties);
if (thriftProtocol) {
return createThriftConnection(server, port[0], augmentedProperties);
}
// GemStone changes END
int traceLevel;
try {
traceLevel = ClientDataSource.getTraceLevel(augmentedProperties);
} catch (java.lang.NumberFormatException e) {
// A null log writer is passed, because jdbc 1 sqlexceptions are automatically traced
throw ClientExceptionUtil.newSQLException(
SQLState.TRACELEVEL_FORMAT_INVALID, e);
}
// Jdbc 1 connections will write driver trace info on a
// driver-wide basis using the jdbc 1 driver manager log writer.
// This log writer may be narrowed to the connection-level
// This log writer will be passed to the agent constructor.
com.pivotal.gemfirexd.internal.client.am.LogWriter dncLogWriter =
ClientDataSource.computeDncLogWriterForNewConnection(java.sql.DriverManager.getLogWriter(),
ClientDataSource.getTraceDirectory(augmentedProperties),
ClientDataSource.getTraceFile(augmentedProperties),
ClientDataSource.getTraceFileAppend(augmentedProperties),
traceLevel,
"_driver",
traceFileSuffixIndex_++);
conn = (com.pivotal.gemfirexd.internal.client.net.NetConnection)getFactory().
newNetConnection((com.pivotal.gemfirexd.internal.client.net.NetLogWriter)
dncLogWriter,
java.sql.DriverManager.getLoginTimeout(),
server,
port[0],
database,
augmentedProperties);
} catch(SqlException se) {
// Translate the internal SqlException into a standard java.sql.SQLException.
throw se.getSQLException(null /* GemStoneAddition */);
}
// NOTE(review): isConnectionNull() semantics are defined in NetConnection;
// returning null here mirrors the JDBC "no connection made" convention.
if(conn.isConnectionNull())
return null;
return conn;
}
/*
/**
* Append attributes to the database name except for user/password
* which are sent as part of the protocol, and SSL which is used
* locally in the client.
* Other attributes will be sent to the server with the database name
* Assumes augmentedProperties is not null
*
* @param database - Short database name
* @param augmentedProperties - Set of properties to append as attributes
* @return databaseName + attributes (e.g. mydb;create=true)
*
private String appendDatabaseAttributes(String database, Properties augmentedProperties) {
StringBuilder longDatabase = new StringBuilder(database);
for (Enumeration keys = augmentedProperties.propertyNames();
keys.hasMoreElements() ;)
{
String key = (String) keys.nextElement();
if (key.equals(com.pivotal.gemfirexd.Attribute.USERNAME_ATTR) ||
// GemStone changes BEGIN
key.equals(com.pivotal.gemfirexd.Attribute.USERNAME_ALT_ATTR) ||
// GemStone changes END
key.equals(com.pivotal.gemfirexd.Attribute.PASSWORD_ATTR) ||
key.equals(ClientAttribute.SSL))
continue;
longDatabase.append(";" + key + "=" + augmentedProperties.getProperty(key));
}
return longDatabase.toString();
}
*/
/**
 * {@inheritDoc}
 *
 * A URL is accepted when it is non-null and matches this driver's
 * protocol pattern (see {@link #matchProtocol}).
 */
public boolean acceptsURL(String url) throws java.sql.SQLException {
    if (url == null) {
        return false;
    }
    return matchProtocol(url).matches();
}
/**
 * {@inheritDoc}
 *
 * Reports the two connection properties this driver understands: the user
 * name (under whichever of the two supported attribute spellings the
 * caller actually supplied) and the password.
 */
public java.sql.DriverPropertyInfo[] getPropertyInfo(String url,
    java.util.Properties properties) throws java.sql.SQLException {
  // Fall back to an empty property set so the lookups below always work.
  if (properties == null) {
    properties = new java.util.Properties();
  }
  // GemStone changes BEGIN
  // Prefer the primary user attribute name; if only the alternate spelling
  // was supplied, report the property under that alternate name instead.
  String userAttr = com.pivotal.gemfirexd.Attribute.USERNAME_ATTR;
  String userValue = properties.getProperty(userAttr);
  if (userValue == null) {
    final String altValue = properties.getProperty(
        com.pivotal.gemfirexd.Attribute.USERNAME_ALT_ATTR);
    if (altValue != null) {
      userAttr = com.pivotal.gemfirexd.Attribute.USERNAME_ALT_ATTR;
      userValue = altValue;
    }
  }
  final java.sql.DriverPropertyInfo[] info = new java.sql.DriverPropertyInfo[2];
  info[0] = new java.sql.DriverPropertyInfo(userAttr, userValue);
  // GemStone changes END
  info[1] = new java.sql.DriverPropertyInfo(
      com.pivotal.gemfirexd.Attribute.PASSWORD_ATTR,
      properties.getProperty(com.pivotal.gemfirexd.Attribute.PASSWORD_ATTR));
  info[0].description = SqlException.getMessageUtil().getTextMessage(
      MessageId.CONN_USERNAME_DESCRIPTION);
  info[1].description = SqlException.getMessageUtil().getTextMessage(
      MessageId.CONN_PASSWORD_DESCRIPTION);
  info[0].required = true;
  info[1].required = false; // depending on the security mechanism
  return info;
}
/**
 * {@inheritDoc}
 *
 * Major version is taken from the product version information compiled
 * into the client configuration.
 */
public int getMajorVersion() {
return ClientConfiguration.getProductVersionHolder().getMajorVersion();
}
/**
 * {@inheritDoc}
 *
 * Minor version is taken from the product version information compiled
 * into the client configuration.
 */
public int getMinorVersion() {
return ClientConfiguration.getProductVersionHolder().getMinorVersion();
}
/**
 * {@inheritDoc}
 *
 * Delegates to the compile-time compliance flag in ClientConfiguration.
 */
public boolean jdbcCompliant() {
return ClientConfiguration.jdbcCompliant;
}
// ----------------helper methods---------------------------------------------
/*
// Tokenize one of the following:
// "jdbc:derby:"
// and return 0 if the protcol is unrecognized
// return DERBY_PROTOCOL for "jdbc:derby"
private static int tokenizeProtocol(String url, java.util.StringTokenizer urlTokenizer) throws SqlException {
// Is this condition necessary, StringTokenizer constructor may do this for us
if (url == null) {
return 0;
}
if (urlTokenizer == null) {
return 0;
}
try {
String jdbc = urlTokenizer.nextToken(":");
if (!jdbc.equals("jdbc")) {
return 0;
}
if (!urlTokenizer.nextToken(":").equals(":")) {
return 0; // Skip over the first colon in jdbc:derby:
}
String dbname = urlTokenizer.nextToken(":");
int protocol = 0;
// GemStone changes BEGIN
if ( (dbname.equals("gemfirexd") && (url.indexOf("gemfirexd://") != -1))
//SQLF:BC
|| (dbname.equals("sqlfire") && (url.indexOf("sqlfire://") != -1))
) {
/* if (dbname.equals("derby") && (url.indexOf("derby://") != -1)) { *
// GemStone changes END
// For Derby AS need to check for // since jdbc:derby: is also the
// embedded prefix
protocol = DERBY_REMOTE_PROTOCOL;
} else {
return 0;
}
if (!urlTokenizer.nextToken(":").equals(":")) {
return 0; // Skip over the second colon in jdbc:derby:
}
return protocol;
} catch (java.util.NoSuchElementException e) {
return 0;
}
}
// tokenize "/server" from URL jdbc:derby://server:port/
// returns server name
private static String tokenizeServerName(java.util.StringTokenizer urlTokenizer,
String url) throws SqlException {
try {
if (!urlTokenizer.nextToken("/").equals("/"))
// A null log writer is passed, because jdbc 1 sqlexceptions are automatically traced
{
throw new SqlException(null,
new ClientMessageId(SQLState.MALFORMED_URL), url);
}
return urlTokenizer.nextToken("/:[");
} catch (java.util.NoSuchElementException e) {
// A null log writer is passed, because jdbc 1 sqlexceptions are automatically traced
throw new SqlException(null,
new ClientMessageId(SQLState.MALFORMED_URL), url);
}
}
// tokenize "[:portNumber]/" from URL jdbc:derby://server[:port]/
// returns the portNumber or zero if portNumber is not specified.
private static int tokenizeOptionalPortNumber(java.util.StringTokenizer urlTokenizer,
String url) throws SqlException {
try {
String firstToken = urlTokenizer.nextToken(":/");
if (firstToken.equals(":")) {
String port = urlTokenizer.nextToken("/");
// GemStone changes BEGIN
// allow for no trailing slash since dbname is optional
if (urlTokenizer.hasMoreTokens() &&
!urlTokenizer.nextToken("/").equals("/")) {
/* if (!urlTokenizer.nextToken("/").equals("/")) { *
// GemStone changes END
// A null log writer is passed, because jdbc 1 sqlexceptions are automatically traced
throw new SqlException(null,
new ClientMessageId(SQLState.MALFORMED_URL), url);
}
return Integer.parseInt(port);
} else if (firstToken.equals("/")) {
return 0;
} else {
// A null log writer is passed, because jdbc 1 sqlexceptions are automatically traced
throw new SqlException(null,
new ClientMessageId(SQLState.MALFORMED_URL), url);
}
} catch (java.util.NoSuchElementException e) {
// A null log writer is passed, because jdbc 1 sqlexceptions are automatically traced
throw new SqlException(null,
new ClientMessageId(SQLState.MALFORMED_URL), url, e);
}
}
//return database name
private static String tokenizeDatabase(java.util.StringTokenizer urlTokenizer,
String url) throws SqlException {
try {
// DERBY-618 - database name can contain spaces in the path
String databaseName = urlTokenizer.nextToken("\t\n\r\f;");
// GemStone changes BEGIN
if (databaseName != null && databaseName.length() > 0
&& databaseName.charAt(0) != ';'
&& !"gemfirexd".equalsIgnoreCase(databaseName)) {
// DB name should not be present in the client-side URL
throw new SqlException(null, new ClientMessageId(
SQLState.MALFORMED_URL), url + " => (unexpected DB name '"
+ databaseName + "')");
}
} catch (java.util.NoSuchElementException ex) {
// deliberately ignored for GemFireXD since it does not require
// a database name
}
return "";
/* (original derby code)
return databaseName;
} catch (java.util.NoSuchElementException e) {
// A null log writer is passed, because jdbc 1 sqlexceptions are automatically traced
throw new SqlException(null,
new ClientMessageId(SQLState.MALFORMED_URL), url, e);
}
*
// GemStone changes END
}
private static java.util.Properties tokenizeURLProperties(String url,
java.util.Properties properties)
throws SqlException {
String attributeString = null;
int attributeIndex = -1;
if ((url != null) &&
((attributeIndex = url.indexOf(";")) != -1)) {
attributeString = url.substring(attributeIndex);
}
return ClientDataSource.tokenizeAttributes(attributeString, properties);
}
*/
/**
 * Returns the shared {@link ClientJDBCObjectFactory}, creating it on first
 * use. A JDBC 4.0 capable implementation is chosen when the running VM
 * supports JDBC 4.0, otherwise the default implementation is used.
 */
public static ClientJDBCObjectFactory getFactory() {
    if (factoryObject == null) {
        factoryObject = ClientConfiguration.supportsJDBC40()
            ? createJDBC40FactoryImpl()
            : createDefaultFactoryImpl();
    }
    return factoryObject;
}
/**
 * Returns a new instance of the ClientJDBCObjectFactoryImpl class, loaded
 * reflectively so this class has no hard compile-time dependency on it.
 *
 * @throws NoClassDefFoundError (with the underlying cause attached) if the
 *         factory class cannot be loaded or instantiated
 */
private static ClientJDBCObjectFactory createDefaultFactoryImpl() {
// GemStone changes BEGIN
    final String factoryName =
        "com.pivotal.gemfirexd.internal.client.net.ClientJDBCObjectFactoryImpl";
    try {
        // Use Constructor.newInstance() instead of the deprecated
        // Class.newInstance(), which can propagate undeclared checked
        // exceptions thrown by the constructor. Any reflection failure
        // (including InvocationTargetException) is still caught below.
        return (ClientJDBCObjectFactory)Class.forName(factoryName)
            .getDeclaredConstructor().newInstance();
    } catch (Exception e) {
        final Error err = new NoClassDefFoundError("unable to load JDBC "
            + "connection factory (ClientJDBCObjectFactoryImpl)");
        err.initCause(e);
        throw err;
    }
    /* (original code)
    return new ClientJDBCObjectFactoryImpl();
    */
// GemStone changes END
}
/**
 * Returns an instance of the ClientJDBCObjectFactoryImpl40 class, falling
 * back to {@link #createDefaultFactoryImpl()} when the JDBC 4.0 class is
 * unavailable or cannot be instantiated.
 *
 * If a future version of JDBC comes then a similar method would be added,
 * say createJDBCXXFactoryImpl, in which if the class is not found then it
 * would return the lower version — a cascading fallback until a valid
 * instance is obtained.
 */
private static ClientJDBCObjectFactory createJDBC40FactoryImpl() {
    final String factoryName =
        "com.pivotal.gemfirexd.internal.client.net.ClientJDBCObjectFactoryImpl40";
    Object factory = null;
    try {
        factory = Class.forName(factoryName).newInstance();
    } catch (ClassNotFoundException cnfe) {
        // fall through to the default implementation below
    } catch (InstantiationException ie) {
        // fall through to the default implementation below
    } catch (IllegalAccessException iae) {
        // fall through to the default implementation below
    }
    if (factory == null) {
        return createDefaultFactoryImpl();
    }
    return (ClientJDBCObjectFactory)factory;
}
// GemStone changes BEGIN
/**
 * Hook for subclasses: decide from the matched URL whether the thrift
 * wire protocol should be used. The base DRDA driver never uses thrift.
 */
protected boolean useThriftProtocol(Matcher m) {
return false;
}
/**
 * Extracts the host[:port] portion from a matched connection URL.
 * Regex group 3 carries the server when present; otherwise group 4 is
 * used. The resolved port number is written into {@code port[0]}.
 */
protected String getServer(Matcher m, int[] port) {
    String hostPort = m.group(3);
    if (hostPort == null || hostPort.length() == 0) {
        hostPort = m.group(4);
    }
    return SharedUtils.getHostPort(hostPort, port);
}
/**
 * Parses the ";key=value" attribute portion (regex group 7) of a matched
 * URL into the supplied properties. When the URL carries no attributes,
 * the properties are returned unchanged.
 */
protected java.util.Properties getURLProperties(Matcher m,
    java.util.Properties properties) throws SqlException {
  final String attributes = m.group(7);
  if (attributes == null || attributes.length() == 0) {
    return properties;
  }
  return ClientDataSource.tokenizeAttributes(attributes, properties);
}
/**
 * Matches the given URL against the full DRDA URL pattern; subclasses
 * override to supply their own pattern.
 */
protected Matcher matchURL(String url) {
return DRDA_URL_PATTERN.matcher(url);
}
/**
 * Matches the given URL against the DRDA protocol-prefix pattern used by
 * acceptsURL(); subclasses override to supply their own pattern.
 */
protected Matcher matchProtocol(String url) {
return DRDA_PROTOCOL_PATTERN.matcher(url);
}
/**
 * Creates a thrift-protocol connection. Never reached in the base DRDA
 * driver because useThriftProtocol() always returns false here; subclasses
 * that enable thrift must override this.
 */
protected java.sql.Connection createThriftConnection(String server, int port,
java.util.Properties props) throws SQLException {
throw new AssertionError(
"ClientDRDADriver.createThriftConnection not expected to be invoked");
}
/**
 * Returns the driver instance registered with DriverManager in the static
 * initializer (used e.g. to de-register it later).
 */
public static ClientDRDADriver getRegisteredDriver() {
return registeredDriver__;
}
// JDBC 4.1 methods since jdk 1.7
/**
 * JDBC 4.1 java.sql.Driver method; this base class never serves JDBC 4.1
 * callers directly, so reaching this implementation is a programming error.
 */
public Logger getParentLogger() {
throw new AssertionError("should be overridden in JDBC 4.1");
}
// GemStone changes END
}
| |
/*
* Copyright 2014 Red Hat, Inc.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
*
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* The Apache License v2.0 is available at
* http://www.opensource.org/licenses/apache2.0.php
*
* You may elect to redistribute this code under either of these licenses.
*/
package io.vertx.ext.web.handler;
import io.vertx.core.http.HttpMethod;
import io.vertx.core.http.HttpServer;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.auth.PubSecKeyOptions;
import io.vertx.ext.auth.oauth2.OAuth2Auth;
import io.vertx.ext.auth.oauth2.OAuth2ClientOptions;
import io.vertx.ext.auth.oauth2.OAuth2FlowType;
import io.vertx.ext.jwt.JWK;
import io.vertx.ext.jwt.JWT;
import io.vertx.ext.jwt.JWTOptions;
import io.vertx.ext.web.WebTestBase;
import org.junit.Test;
import java.util.Base64;
import java.util.concurrent.CountDownLatch;
/**
* @author Paulo Lopes
*/
/**
 * Tests for OAuth2AuthHandler: the authorization-code flow (with correct
 * and deliberately broken callback wiring), the resource-owner password
 * flow via BasicAuthHandler, and bearer-only token validation including
 * locally signed JWTs.
 *
 * @author Paulo Lopes
 */
public class OAuth2AuthHandlerTest extends WebTestBase {
// Canned token-endpoint response served by the mock OAuth2 servers below.
private static final JsonObject fixture = new JsonObject(
"{" +
" \"access_token\": \"4adc339e0\"," +
" \"refresh_token\": \"ec1a59d298\"," +
" \"token_type\": \"bearer\"," +
" \"expires_in\": 7200" +
"}");
@Override
public void tearDown() throws Exception {
super.tearDown();
}
// "Location" header captured from the most recent 302 redirect response.
private String redirectURL = null;
/**
 * Happy path: unauthenticated request to a protected page redirects to the
 * provider; the faked callback exchanges the code and grants access.
 */
@Test
public void testAuthCodeFlow() throws Exception {
// lets mock a oauth2 server using code auth code flow
OAuth2Auth oauth2 = OAuth2Auth.create(vertx, new OAuth2ClientOptions()
.setClientID("client-id")
.setFlow(OAuth2FlowType.AUTH_CODE)
.setClientSecret("client-secret")
.setSite("http://localhost:10000"));
// Latch ensures the mock provider is listening before the test proceeds.
final CountDownLatch latch = new CountDownLatch(1);
HttpServer server = vertx.createHttpServer().requestHandler(req -> {
if (req.method() == HttpMethod.POST && "/oauth/token".equals(req.path())) {
req.setExpectMultipart(true).bodyHandler(buffer -> req.response().putHeader("Content-Type", "application/json").end(fixture.encode()));
} else if (req.method() == HttpMethod.POST && "/oauth/revoke".equals(req.path())) {
req.setExpectMultipart(true).bodyHandler(buffer -> req.response().end());
} else {
req.response().setStatusCode(400).end();
}
}).listen(10000, ready -> {
if (ready.failed()) {
throw new RuntimeException(ready.cause());
}
// ready
latch.countDown();
});
latch.await();
// create a oauth2 handler on our domain to the callback: "http://localhost:8080/callback"
OAuth2AuthHandler oauth2Handler = OAuth2AuthHandler.create(vertx, oauth2, "http://localhost:8080/callback");
// setup the callback handler for receiving the callback
oauth2Handler.setupCallback(router.route());
// protect everything under /protected
router.route("/protected/*").handler(oauth2Handler);
// mount some handler under the protected zone
router.route("/protected/somepage").handler(rc -> {
assertNotNull(rc.user());
rc.response().end("Welcome to the protected resource!");
});
testRequest(HttpMethod.GET, "/protected/somepage", null, resp -> {
// in this case we should get a redirect
redirectURL = resp.getHeader("Location");
assertNotNull(redirectURL);
}, 302, "Found", null);
// fake the redirect
testRequest(HttpMethod.GET, "/callback?state=/protected/somepage&code=1", null, resp -> {
}, 200, "OK", "Welcome to the protected resource!");
server.close();
}
/**
 * Mis-wired setup first (callback route shadowed by the catch-all handler,
 * expected 500), then the corrected wiring on a cleared router (expected
 * to succeed).
 */
@Test
public void testAuthCodeFlowBadSetup() throws Exception {
// lets mock a oauth2 server using code auth code flow
OAuth2Auth oauth2 = OAuth2Auth.create(vertx, new OAuth2ClientOptions()
.setFlow(OAuth2FlowType.AUTH_CODE)
.setClientID("client-id")
.setClientSecret("client-secret")
.setSite("http://localhost:10000"));
final CountDownLatch latch = new CountDownLatch(1);
HttpServer server = vertx.createHttpServer().requestHandler(req -> {
if (req.method() == HttpMethod.POST && "/oauth/token".equals(req.path())) {
req.setExpectMultipart(true).bodyHandler(buffer -> req.response().putHeader("Content-Type", "application/json").end(fixture.encode()));
} else if (req.method() == HttpMethod.POST && "/oauth/revoke".equals(req.path())) {
req.setExpectMultipart(true).bodyHandler(buffer -> req.response().end());
} else {
req.response().setStatusCode(400).end();
}
}).listen(10000, ready -> {
if (ready.failed()) {
throw new RuntimeException(ready.cause());
}
// ready
latch.countDown();
});
latch.await();
// protect everything. This has the bad sideffect that it will also shade the callback route which is computed
// after this handler, the proper way to fix this would be create the route before
router.route()
.handler(
OAuth2AuthHandler
.create(vertx, oauth2, "http://localhost:8080/callback")
.setupCallback(router.route()));
// mount some handler under the protected zone
router.route("/protected/somepage").handler(rc -> {
assertNotNull(rc.user());
rc.response().end("Welcome to the protected resource!");
});
testRequest(HttpMethod.GET, "/protected/somepage", null, resp -> {
// in this case we should get a redirect
redirectURL = resp.getHeader("Location");
assertNotNull(redirectURL);
}, 302, "Found", null);
// fake the redirect
testRequest(HttpMethod.GET, "/callback?state=/protected/somepage&code=1", null, resp -> {
}, 500, "Internal Server Error", "Internal Server Error");
// second attempt with proper config
router.clear();
// protect everything.
OAuth2AuthHandler oauth2Handler = OAuth2AuthHandler
.create(vertx, oauth2, "http://localhost:8080/callback")
.setupCallback(router.route());
// now the callback is registered before as it should
router.route().handler(oauth2Handler);
// mount some handler under the protected zone
router.route("/protected/somepage").handler(rc -> {
assertNotNull(rc.user());
rc.response().end("Welcome to the protected resource!");
});
testRequest(HttpMethod.GET, "/protected/somepage", null, resp -> {
// in this case we should get a redirect
redirectURL = resp.getHeader("Location");
assertNotNull(redirectURL);
}, 302, "Found", null);
// fake the redirect
testRequest(HttpMethod.GET, "/callback?state=/protected/somepage&code=1", null, resp -> {
}, 200, "OK", "Welcome to the protected resource!");
server.close();
}
/**
 * Resource-owner password flow: Basic credentials are forwarded to the
 * token endpoint as a grant_type=password request; missing credentials
 * yield 401.
 */
@Test
public void testPasswordFlow() throws Exception {
// lets mock a oauth2 server using code auth code flow
OAuth2Auth oauth2 = OAuth2Auth.create(vertx, new OAuth2ClientOptions()
.setClientID("client-id")
.setClientSecret("client-secret")
.setSite("http://localhost:10000")
.setFlow(OAuth2FlowType.PASSWORD));
final CountDownLatch latch = new CountDownLatch(1);
HttpServer server = vertx.createHttpServer().requestHandler(req -> {
if (req.method() == HttpMethod.POST && "/oauth/token".equals(req.path())) {
req.setExpectMultipart(true).bodyHandler(buffer -> {
// The token request must carry the Basic credentials as a
// password grant.
final String queryString = buffer.toString();
assertTrue(queryString.contains("username=paulo"));
assertTrue(queryString.contains("password=bananas"));
assertTrue(queryString.contains("grant_type=password"));
req.response().putHeader("Content-Type", "application/json").end(fixture.encode());
});
} else if (req.method() == HttpMethod.POST && "/oauth/revoke".equals(req.path())) {
req.setExpectMultipart(true).bodyHandler(buffer -> req.response().end());
} else {
req.response().setStatusCode(400).end();
}
}).listen(10000, ready -> {
if (ready.failed()) {
throw new RuntimeException(ready.cause());
}
// ready
latch.countDown();
});
latch.await();
AuthenticationHandler oauth2Handler = BasicAuthHandler.create(oauth2);
// protect everything under /protected
router.route("/protected/*").handler(oauth2Handler);
// mount some handler under the protected zone
router.route("/protected/somepage").handler(rc -> {
assertNotNull(rc.user());
rc.response().end("Welcome to the protected resource!");
});
testRequest(HttpMethod.GET, "/protected/somepage", req -> req.putHeader("Authorization", "Basic " + Base64.getEncoder().encodeToString("paulo:bananas".getBytes())), res -> {
// in this case we should get the resource
}, 200, "OK", "Welcome to the protected resource!");
testRequest(HttpMethod.GET, "/protected/somepage", 401, "Unauthorized");
server.close();
}
/**
 * Bearer-only handler with no key material: every token, including a
 * plausible-looking one, must be rejected with 401.
 */
@Test
public void testBearerOnly() throws Exception {
// lets mock a oauth2 server using code auth code flow
OAuth2Auth oauth2 = OAuth2Auth.create(vertx, new OAuth2ClientOptions().setFlow(OAuth2FlowType.AUTH_CODE).setClientID("client-id"));
OAuth2AuthHandler oauth2Handler = OAuth2AuthHandler.create(vertx, oauth2);
// protect everything under /protected
router.route("/protected/*").handler(oauth2Handler);
// mount some handler under the protected zone
router.route("/protected/somepage").handler(rc -> {
assertNotNull(rc.user());
rc.response().end("Welcome to the protected resource!");
});
testRequest(HttpMethod.GET, "/protected/somepage", 401, "Unauthorized");
// Now try again with fake credentials
testRequest(HttpMethod.GET, "/protected/somepage", req -> req.putHeader("Authorization", "Bearer 4adc339e0"), 401, "Unauthorized", "Unauthorized");
}
/**
 * Bearer-only handler configured with an RSA public key: tokens signed
 * with the matching private key are accepted, unsigned/garbage tokens and
 * expired tokens are rejected with 401.
 */
@Test
public void testBearerOnlyWithJWT() throws Exception {
// Auth provider holding only the RSA *public* key used for verification.
OAuth2Auth oauth = OAuth2Auth
.create(
vertx,
new OAuth2ClientOptions()
.setClientID("dummy-client")
.addPubSecKey(new PubSecKeyOptions()
.setAlgorithm("RS256")
.setBuffer(
"-----BEGIN PUBLIC KEY-----\n" +
"MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmuIC9Qvwoe/3tUpHkcUp\n" +
"vWmzQqnZtz3HBKbxzc/jBTxUHefJDs88Xjw5nNXhl4tXkHzFRAZHtDnwX074/2oc\n" +
"PRSWaBjHYXB771af91UPrc9fb4lh3W1a8hmQU6sgKlQVwDnUuePDkCmwKCsuyX0M\n" +
"wxuwOwEUo4r15NBh/H7FvuHVPnqWK1/kliYtQukF3svQkpZT6/puQ0bEOefROLB+\n" +
"EAPM0OAaDyknjxCZJenk9FIyC6skOKVaxW7CcE54lIUjS1GKFQc44/+T+u0VKSmh\n" +
"rRdBNcAhXmdpwjLoDTy/I8z+uqkKitdEVczCdleNqeb6b1kjPWS3VbLXxY/LIYlz\n" +
"uQIDAQAB\n" +
"-----END PUBLIC KEY-----")
)
);
assertNotNull(oauth);
// Local signer holding the matching *private* key, used to mint test tokens.
JWT jwt = new JWT().addJWK(
new JWK(new PubSecKeyOptions()
.setAlgorithm("RS256")
.setBuffer(
"-----BEGIN PRIVATE KEY-----\n" +
"MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCa4gL1C/Ch7/e1\n" +
"SkeRxSm9abNCqdm3PccEpvHNz+MFPFQd58kOzzxePDmc1eGXi1eQfMVEBke0OfBf\n" +
"Tvj/ahw9FJZoGMdhcHvvVp/3VQ+tz19viWHdbVryGZBTqyAqVBXAOdS548OQKbAo\n" +
"Ky7JfQzDG7A7ARSjivXk0GH8fsW+4dU+epYrX+SWJi1C6QXey9CSllPr+m5DRsQ5\n" +
"59E4sH4QA8zQ4BoPKSePEJkl6eT0UjILqyQ4pVrFbsJwTniUhSNLUYoVBzjj/5P6\n" +
"7RUpKaGtF0E1wCFeZ2nCMugNPL8jzP66qQqK10RVzMJ2V42p5vpvWSM9ZLdVstfF\n" +
"j8shiXO5AgMBAAECggEAIriwOQcoNuV4/qdcTA2LQe9ERJmXOUEcMKrMYntMRYw0\n" +
"v0+K/0ruGaIeuE4qeLLAOp/+CTXvNTQX8wXdREUhd3/6B/QmHm39GrasveHP1gM7\n" +
"PeHqkp1FWijo9hjS6SpYhfNxAQtSeCsgVqD3qCvkhIjchR3E5rTsUxN0JAq3ggb9\n" +
"WCJ2LUxOOTHAWL4cv7FIKfwU/bwjBdHbSLuh7em4IE8tzcFgh49281APprGb4a3d\n" +
"CPlIZC+CQmTFKPGzT0WDNc3EbPPKcx8ECRf1Zo94Tqnzv7FLgCmr0o4O9e6E3yss\n" +
"Uwp7EKPUQyAwBkc+pHwqUmOPqHB+z28JUOwqoD0vQQKBgQDNiXSydWh9BUWAleQU\n" +
"fgSF0bjlt38HVcyMKGC1xQhi8VeAfLJxGCGbdxsPFNCtMPDLRRyd4xHBmsCmPPli\n" +
"CFHD1UbfNuKma6azl6A86geuTolgrHoxp57tZwoBpG9JHoTA53pfBPxb8q39YXKh\n" +
"DSXsJVldxsHwzFAklj3ZqzWq3QKBgQDA6M/VW3SXEt1NWwMI+WGa/QKHDjLDhZzF\n" +
"F3iQTtzDDmA4louAzX1cykNo6Y7SpORi0ralml65iwT2HZtE8w9vbw4LNmBiHmlX\n" +
"AvpZSHT6/7nQeiFtxZu9cyw4GGpNSaeqp4Cq6TGYmfbq4nIdryzUU2AgsqSZyrra\n" +
"xh7K+2I4jQKBgGjC8xQy+7sdgLt1qvc29B8xMkkEKl8WwFeADSsY7plf4fW/mURD\n" +
"xH11S/l35pUgKNuysk9Xealws1kIIyRwkRx8DM+hLg0dOa64Thg+QQP7S9JWl0HP\n" +
"6hWfO15y7bYbNBcO5TShWe+T1lMb5E1qYjXnI5HEyP1vZjn/yi60MXqRAoGAe6F4\n" +
"+QLIwL1dSOMoGctBS4QU55so23e41fNJ2CpCf1uqPPn2Y9DOI/aYpxbv6n20xMTI\n" +
"O2+of37h6h1lUhX38XGZ7YOm15sn5ZTJ/whZuDbFzh9HZ0N6oTq7vyOelPO8WblJ\n" +
"077pgyRBQ51mhzGqKFVayPnUVZ/Ais7oEyxycU0CgYEAzEUhmN22ykywh0My83z/\n" +
"7yl2tyrlv2hcZbaP7+9eHdUafGG8jMTVD7jxhzAbiSo2UeyHUnAItDnLetLh89K6\n" +
"0oF3/rZLqugtb+f48dgRE/SDF4Itgp5fDqWHLhEW7ZhWCFlFgZ3sq0XryIxzFof0\n" +
"O/Fd1NnotirzTnob5ReblIM=\n" +
"-----END PRIVATE KEY-----\n")));
assertNotNull(jwt);
// lets mock a oauth2 server using code auth code flow
OAuth2AuthHandler oauth2Handler = OAuth2AuthHandler.create(vertx, oauth);
// protect everything under /protected
router.route("/protected/*").handler(oauth2Handler);
// mount some handler under the protected zone
router.route("/protected/somepage").handler(rc -> {
assertNotNull(rc.user());
rc.response().end("Welcome to the protected resource!");
});
testRequest(HttpMethod.GET, "/protected/somepage", 401, "Unauthorized");
// Now try again with fake credentials
testRequest(HttpMethod.GET, "/protected/somepage", req -> req.putHeader("Authorization", "Bearer 4adc339e0"), 401, "Unauthorized", "Unauthorized");
// Now try again with real credentials
String accessToken = jwt.sign(
new JsonObject(
"{\n" +
" \"iss\": \"https://server.example.com\",\n" +
" \"aud\": \"s6BhdRkqt3\",\n" +
" \"jti\": \"a-123\",\n" +
" \"exp\": 999999999999,\n" +
" \"iat\": 1311280970,\n" +
" \"sub\": \"24400320\",\n" +
" \"upn\": \"jdoe@server.example.com\",\n" +
" \"groups\": [\"red-group\", \"green-group\", \"admin-group\", \"admin\"]\n" +
"}"), new JWTOptions().setAlgorithm("RS256"));
testRequest(HttpMethod.GET, "/protected/somepage", req -> req.putHeader("Authorization", "Bearer " + accessToken), 200, "OK", "Welcome to the protected resource!");
// Now try again with expired credentials
String accessTokenExp = jwt.sign(
new JsonObject(
"{\n" +
" \"iss\": \"https://server.example.com\",\n" +
" \"aud\": \"s6BhdRkqt3\",\n" +
" \"jti\": \"a-123\",\n" +
" \"exp\": 1311280970,\n" +
" \"iat\": 1311280970,\n" +
" \"sub\": \"24400320\",\n" +
" \"upn\": \"jdoe@server.example.com\",\n" +
" \"groups\": [\"red-group\", \"green-group\", \"admin-group\", \"admin\"]\n" +
"}"), new JWTOptions().setAlgorithm("RS256"));
testRequest(HttpMethod.GET, "/protected/somepage", req -> req.putHeader("Authorization", "Bearer " + accessTokenExp), 401, "Unauthorized", "Unauthorized");
}
}
| |
/**
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.litecoin.core;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.Map;
import static com.google.litecoin.core.Utils.*;
/**
* <p>Methods to serialize and de-serialize messages to the Litecoin network format as defined in
* <a href="https://en.litecoin.it/wiki/Protocol_specification">the protocol specification</a>.</p>
*
* <p>To be able to serialize and deserialize new Message subclasses the following criteria needs to be met.</p>
*
* <ul>
* <li>The proper Class instance needs to be mapped to it's message name in the names variable below</li>
* <li>There needs to be a constructor matching: NetworkParameters params, byte[] payload</li>
* <li>Message.litecoinSerializeToStream() needs to be properly subclassed</li>
* </ul>
*/
public class LitecoinSerializer {
private static final Logger log = LoggerFactory.getLogger(LitecoinSerializer.class);
// Commands are NULL-padded ASCII, fixed at 12 bytes on the wire.
private static final int COMMAND_LEN = 12;
// Network parameters; supplies packetMagic and is passed to parsed Messages.
private NetworkParameters params;
// When true, messages are deserialized lazily (parsed on first access).
private boolean parseLazy = false;
// When true, messages retain their backing byte array for fast re-serialization.
private boolean parseRetain = false;
// Maps each Message subclass to its on-the-wire command string; used both to
// serialize (class -> name) and, indirectly, to recognize incoming commands.
private static Map<Class<? extends Message>, String> names = new HashMap<Class<? extends Message>, String>();
static {
names.put(VersionMessage.class, "version");
names.put(InventoryMessage.class, "inv");
names.put(Block.class, "block");
names.put(GetDataMessage.class, "getdata");
names.put(Transaction.class, "tx");
names.put(AddressMessage.class, "addr");
names.put(Ping.class, "ping");
names.put(Pong.class, "pong");
names.put(VersionAck.class, "verack");
names.put(GetBlocksMessage.class, "getblocks");
names.put(GetHeadersMessage.class, "getheaders");
names.put(GetAddrMessage.class, "getaddr");
names.put(HeadersMessage.class, "headers");
names.put(BloomFilter.class, "filterload");
names.put(FilteredBlock.class, "merkleblock");
names.put(NotFoundMessage.class, "notfound");
names.put(MemoryPoolMessage.class, "mempool");
}
/**
 * Constructs a LitecoinSerializer with the given behavior (eager parsing,
 * no byte-array retention).
 *
 * @param params networkParams used to create Messages instances and determining packetMagic
 */
public LitecoinSerializer(NetworkParameters params) {
this(params, false, false);
}
/**
 * Constructs a LitecoinSerializer with the given behavior.
 *
 * @param params networkParams used to create Messages instances and determining packetMagic
 * @param parseLazy deserialize messages in lazy mode.
 * @param parseRetain retain the backing byte array of a message for fast reserialization.
 */
public LitecoinSerializer(NetworkParameters params, boolean parseLazy, boolean parseRetain) {
this.params = params;
this.parseLazy = parseLazy;
this.parseRetain = parseRetain;
}
/**
 * Writes the message (header plus payload) to the output stream in the
 * wire format: 4-byte magic, 12-byte command, 4-byte length, 4-byte
 * checksum, then the payload.
 *
 * @throws Error if the message class has no registered command name
 */
public void serialize(Message message, OutputStream out) throws IOException {
String name = names.get(message.getClass());
if (name == null) {
throw new Error("LitecoinSerializer doesn't currently know how to serialize " + message.getClass());
}
// Header layout: [0..3]=magic BE, [4..15]=command, [16..19]=length LE, [20..23]=checksum.
byte[] header = new byte[4 + COMMAND_LEN + 4 + 4 /* checksum */];
uint32ToByteArrayBE(params.packetMagic, header, 0);
// The header array is initialized to zero by Java so we don't have to worry about
// NULL terminating the string here.
for (int i = 0; i < name.length() && i < COMMAND_LEN; i++) {
header[4 + i] = (byte) (name.codePointAt(i) & 0xFF);
}
byte[] payload = message.litecoinSerialize();
// Payload length is little-endian, unlike the big-endian magic above.
Utils.uint32ToByteArrayLE(payload.length, header, 4 + COMMAND_LEN);
byte[] checksum = message.getChecksum();
if (checksum == null) {
Sha256Hash msgHash = message.getHash();
if (msgHash != null && message instanceof Transaction) {
// if the message happens to have a precalculated hash use
// it.
// reverse copying 4 bytes is about 1600 times faster than
// calculating a new hash
// this is only possible for transactions as block hashes
// are hashes of the header only
byte[] hash = msgHash.getBytes();
int start = 4 + COMMAND_LEN + 4;
for (int i = start; i < start + 4; i++)
header[i] = hash[31 - i + start];
} else {
// Checksum is the first four bytes of SHA256(SHA256(payload)).
byte[] hash = doubleDigest(payload);
System.arraycopy(hash, 0, header, 4 + COMMAND_LEN + 4, 4);
}
} else {
// Message already carries a cached checksum; copy it straight in.
System.arraycopy(checksum, 0, header, 4 + COMMAND_LEN + 4, 4);
}
out.write(header);
out.write(payload);
if (log.isDebugEnabled())
log.debug("Sending {} message: {}", name, bytesToHexString(header) + bytesToHexString(payload));
}
/**
* Reads a message from the given InputStream and returns it.
*/
/**
 * Reads a complete message (header + payload) from the given InputStream
 * and returns it as a concrete Message subclass.
 *
 * @throws ProtocolException if the payload fails checksum verification or
 *         cannot be parsed into a message
 * @throws IOException if the underlying stream ends or fails mid-message
 */
public Message deserialize(InputStream in) throws ProtocolException, IOException {
    // A Litecoin protocol message has the following format.
    //
    //   - 4 byte magic number: 0xfabfb5da for the testnet or
    //     0xf9beb4d9 for production
    //   - 12 byte command in ASCII
    //   - 4 byte payload size
    //   - 4 byte checksum
    //   - Payload data
    //
    // The checksum is the first 4 bytes of a SHA256 hash of the message payload. It isn't
    // present for all messages, notably, the first one on a connection.
    //
    // Satoshi's implementation ignores garbage before the magic header bytes. We have to do the same because
    // sometimes it sends us stuff that isn't part of any message.
    seekPastMagicBytes(in);
    LitecoinPacketHeader header = new LitecoinPacketHeader(in);
    // Now try to read the whole message.
    return deserializePayload(header, in);
}
/**
* Deserializes only the header in case packet meta data is needed before decoding
* the payload. This method assumes you have already called seekPastMagicBytes()
*/
/**
 * Deserializes only the header in case packet meta data is needed before decoding
 * the payload. This method assumes you have already called seekPastMagicBytes().
 *
 * @return the parsed fixed-size packet header (command, payload size, checksum)
 */
public LitecoinPacketHeader deserializeHeader(InputStream in) throws ProtocolException, IOException {
    return new LitecoinPacketHeader(in);
}
/**
* Deserialize payload only. You must provide a header, typically obtained by calling
* {@link LitecoinSerializer#deserializeHeader}.
*/
/**
 * Deserialize payload only. You must provide a header, typically obtained by calling
 * {@link LitecoinSerializer#deserializeHeader}.
 *
 * <p>Bug fix: the read loop previously terminated while
 * {@code readCursor < payloadBytes.length - 1}, which could leave the final
 * payload byte unread (and never read a 1-byte payload at all), corrupting the
 * payload and failing the checksum. The loop now reads the full payload.
 *
 * @throws ProtocolException if the checksum does not match or parsing fails
 * @throws IOException if the stream disconnects before the payload is complete
 */
public Message deserializePayload(LitecoinPacketHeader header, InputStream in) throws ProtocolException, IOException {
    int readCursor = 0;
    byte[] payloadBytes = new byte[header.size];
    // Read until the entire declared payload is buffered; read() may return
    // fewer bytes than requested, so loop until the cursor reaches the end.
    while (readCursor < payloadBytes.length) {
        int bytesRead = in.read(payloadBytes, readCursor, header.size - readCursor);
        if (bytesRead == -1) {
            throw new IOException("Socket is disconnected");
        }
        readCursor += bytesRead;
    }
    // Verify the checksum: first 4 bytes of double-SHA256 over the payload.
    byte[] hash;
    hash = doubleDigest(payloadBytes);
    if (header.checksum[0] != hash[0] || header.checksum[1] != hash[1] ||
            header.checksum[2] != hash[2] || header.checksum[3] != hash[3]) {
        throw new ProtocolException("Checksum failed to verify, actual " +
                bytesToHexString(hash) +
                " vs " + bytesToHexString(header.checksum));
    }
    if (log.isDebugEnabled()) {
        log.debug("Received {} byte '{}' message: {}", new Object[]{
                header.size,
                header.command,
                Utils.bytesToHexString(payloadBytes)
        });
    }
    try {
        return makeMessage(header.command, header.size, payloadBytes, hash, header.checksum);
    } catch (Exception e) {
        throw new ProtocolException("Error deserializing message " + Utils.bytesToHexString(payloadBytes) + "\n", e);
    }
}
/**
 * Instantiates the concrete Message subclass for the given wire command.
 * Messages created via the {@code message = ...} branches get the received
 * checksum cached on them (for fast reserialization); branches that
 * {@code return} directly (version, verack, headers, alert, filterload,
 * notfound, mempool) intentionally skip the checksum caching at the bottom.
 *
 * @param command  the ASCII command name from the packet header
 * @param length   declared payload length, passed to lazy-parse constructors
 * @param hash     double-SHA256 of the payload; reused to pre-set a tx hash
 * @param checksum the 4 checksum bytes from the header, cached on the message
 * @throws ProtocolException if the payload cannot be parsed
 */
private Message makeMessage(String command, int length, byte[] payloadBytes, byte[] hash, byte[] checksum) throws ProtocolException {
    // We use an if ladder rather than reflection because reflection is very slow on Android.
    Message message;
    if (command.equals("version")) {
        return new VersionMessage(params, payloadBytes);
    } else if (command.equals("inv")) {
        message = new InventoryMessage(params, payloadBytes, parseLazy, parseRetain, length);
    } else if (command.equals("block")) {
        message = new Block(params, payloadBytes, parseLazy, parseRetain, length);
    } else if (command.equals("merkleblock")) {
        message = new FilteredBlock(params, payloadBytes);
    } else if (command.equals("getdata")) {
        message = new GetDataMessage(params, payloadBytes, parseLazy, parseRetain, length);
    } else if (command.equals("tx")) {
        Transaction tx = new Transaction(params, payloadBytes, null, parseLazy, parseRetain, length);
        if (hash != null)
            // The payload hash IS the tx hash (byte-reversed), so set it up front
            // to avoid recomputing it later.
            tx.setHash(new Sha256Hash(Utils.reverseBytes(hash)));
        message = tx;
    } else if (command.equals("addr")) {
        message = new AddressMessage(params, payloadBytes, parseLazy, parseRetain, length);
    } else if (command.equals("ping")) {
        message = new Ping(params, payloadBytes);
    } else if (command.equals("pong")) {
        message = new Pong(params, payloadBytes);
    } else if (command.equals("verack")) {
        return new VersionAck(params, payloadBytes);
    } else if (command.equals("headers")) {
        return new HeadersMessage(params, payloadBytes);
    } else if (command.equals("alert")) {
        return new AlertMessage(params, payloadBytes);
    } else if (command.equals("filterload")) {
        return new BloomFilter(params, payloadBytes);
    } else if (command.equals("notfound")) {
        return new NotFoundMessage(params, payloadBytes);
    } else if (command.equals("mempool")) {
        return new MemoryPoolMessage();
    } else {
        // Unknown command: keep the connection alive and preserve the raw bytes.
        log.warn("No support for deserializing message with name {}", command);
        return new UnknownMessage(params, command, payloadBytes);
    }
    if (checksum != null)
        message.setChecksum(checksum);
    return message;
}
/**
 * Consumes bytes from the stream until the 4-byte packet magic has been read,
 * discarding any garbage that precedes it. Returns with the stream positioned
 * immediately after the magic sequence.
 *
 * @throws IOException if the stream ends before the magic is found
 */
public void seekPastMagicBytes(InputStream in) throws IOException {
    // Index (3 down to 0) of the magic byte we expect next, most significant first.
    int magicCursor = 3;
    for (;;) {
        int b = in.read();
        if (b < 0) {
            // End of stream reached without completing the magic sequence.
            throw new IOException("Socket is disconnected");
        }
        // Extract the byte of packetMagic we are currently matching against.
        int expectedByte = 0xFF & (int) (params.packetMagic >>> (magicCursor * 8));
        if (b != expectedByte) {
            // Partial matches don't count: restart from the first magic byte.
            magicCursor = 3;
        } else if (--magicCursor < 0) {
            // All four magic bytes matched in sequence.
            return;
        }
    }
}
/**
 * Whether the serializer will produce lazy parse mode Messages.
 *
 * @return true if messages are deserialized lazily
 */
public boolean isParseLazyMode() {
    return parseLazy;
}
/**
 * Whether the serializer will produce cached mode Messages (retaining the
 * backing byte array for fast reserialization).
 *
 * @return true if messages retain their backing byte arrays
 */
public boolean isParseRetainMode() {
    return parseRetain;
}
/**
 * Parsed representation of the fixed-size portion of a wire packet that
 * follows the magic bytes: 12-byte command, 4-byte payload size, 4-byte checksum.
 */
public static class LitecoinPacketHeader {
    // Raw header bytes as read from the stream (command + size + checksum).
    public final byte[] header;
    // ASCII command name with trailing NUL padding stripped.
    public final String command;
    // Declared payload length in bytes (does not include the header itself).
    public final int size;
    // The 4 checksum bytes covering the payload that follows.
    public final byte[] checksum;

    public LitecoinPacketHeader(InputStream in) throws ProtocolException, IOException {
        header = new byte[COMMAND_LEN + 4 + 4];
        int readCursor = 0;
        // Block until the whole fixed-size header has been buffered.
        while (readCursor < header.length) {
            int bytesRead = in.read(header, readCursor, header.length - readCursor);
            if (bytesRead == -1) {
                // There's no more data to read.
                throw new IOException("Incomplete packet in underlying stream");
            }
            readCursor += bytesRead;
        }
        int cursor = 0;
        // The command is a NULL terminated string, unless the command fills all twelve bytes
        // in which case the termination is implicit.
        int mark = cursor;
        for (; header[cursor] != 0 && cursor - mark < COMMAND_LEN; cursor++) ;
        byte[] commandBytes = new byte[cursor - mark];
        System.arraycopy(header, mark, commandBytes, 0, cursor - mark);
        try {
            command = new String(commandBytes, "US-ASCII");
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e); // Cannot happen.
        }
        cursor = mark + COMMAND_LEN;
        size = (int) readUint32(header, cursor);
        cursor += 4;
        if (size > Message.MAX_SIZE)
            throw new ProtocolException("Message size too large: " + size);
        // Old clients don't send the checksum.
        checksum = new byte[4];
        // The checksum occupies the last 4 bytes of the header buffered above.
        // NOTE(review): a previous comment claimed `size` includes the checksum
        // bytes; `size` is read before the checksum and denotes payload length only.
        System.arraycopy(header, cursor, checksum, 0, 4);
        cursor += 4;
    }
}
}
| |
/*
* Copyright 2016 OpenMarket Ltd
* Copyright 2017 Vector Creations Ltd
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package im.vector.util;
import android.net.Uri;
import android.os.Parcel;
import android.os.Parcelable;
import android.text.ParcelableSpan;
import android.text.Spannable;
import android.text.SpannableStringBuilder;
import android.text.style.ClickableSpan;
import android.view.View;
import org.matrix.androidsdk.core.Log;
import org.matrix.androidsdk.core.MXPatterns;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import im.vector.listeners.IMessagesAdapterActionsListener;
// Clickable span that routes taps on matrix items (user ids, room ids/aliases,
// event ids, group ids, tombstone links and plain URLs) to the adapter's
// action listener, falling back to an external browser for plain URLs.
public class MatrixURLSpan extends ClickableSpan implements ParcelableSpan {
    private static final String LOG_TAG = MatrixURLSpan.class.getSimpleName();

    // Parcelable plumbing. Note that only the URL survives a parcel round trip:
    // the pattern, tombstone flag, sender id and listener are not written.
    public static final Parcelable.Creator<MatrixURLSpan> CREATOR = new Parcelable.Creator<MatrixURLSpan>() {
        @Override
        public MatrixURLSpan createFromParcel(Parcel source) {
            return new MatrixURLSpan(source);
        }

        @Override
        public MatrixURLSpan[] newArray(int size) {
            return new MatrixURLSpan[size];
        }
    };

    // the URL to track
    private final String mURL;

    // URL regex that matched mURL; compared by reference against the MXPatterns
    // constants in onClick() to decide which listener callback to fire.
    private final Pattern mPattern;

    // is a tombstone link
    private final boolean isTombstone;

    // SenderId for the tombstone link
    private final String senderId;

    // listener
    private final IMessagesAdapterActionsListener mActionsListener;

    /**
     * Creates a span for a matrix identifier or URL matched by a MXPatterns regex.
     *
     * @param url             the matched identifier or URL
     * @param pattern         the MXPatterns constant that matched (drives click dispatch)
     * @param actionsListener listener notified on click; may be null
     */
    public MatrixURLSpan(String url, Pattern pattern, IMessagesAdapterActionsListener actionsListener) {
        mURL = url;
        mPattern = pattern;
        isTombstone = false;
        senderId = null;
        mActionsListener = actionsListener;
    }

    /**
     * Create a URL Span for tombstone.
     *
     * @param roomId          the room id carried by the tombstone link
     * @param senderId        sender id associated with the tombstone event
     * @param actionsListener listener notified on click; may be null
     */
    public MatrixURLSpan(String roomId, String senderId, IMessagesAdapterActionsListener actionsListener) {
        mURL = roomId;
        mPattern = null;
        isTombstone = true;
        this.senderId = senderId;
        mActionsListener = actionsListener;
    }

    // Parcel constructor: the span degrades to a plain, listener-less URL span.
    private MatrixURLSpan(Parcel src) {
        mURL = src.readString();
        mPattern = null;
        isTombstone = false;
        senderId = null;
        mActionsListener = null;
    }

    // Parcelable: no special object kinds in the marshalled form.
    public int describeContents() {
        return 0;
    }

    @Override
    public int getSpanTypeId() {
        return getSpanTypeIdInternal();
    }

    /*
     * *********************************************************************************************
     * Inherited from ParcelableSpan
     * *********************************************************************************************
     */
    public int getSpanTypeIdInternal() {
        // Class hash used as a stable-enough span type id for this process.
        return getClass().hashCode();
    }

    public void writeToParcel(Parcel dest, int flags) {
        writeToParcelInternal(dest, flags);
    }

    public void writeToParcelInternal(Parcel dest, int flags) {
        // Only the URL is persisted; see the Parcel constructor above.
        dest.writeString(mURL);
    }

    /*
     * *********************************************************************************************
     * Custom methods
     * *********************************************************************************************
     */
    private String getURL() {
        return mURL;
    }

    /**
     * Dispatches the click to the matching listener callback based on which
     * MXPatterns constant this span was created with (reference comparison is
     * intentional: the same constants are used at creation time). Unmatched
     * patterns are treated as plain URLs.
     */
    @Override
    public void onClick(View widget) {
        try {
            if (isTombstone) {
                if (null != mActionsListener) {
                    mActionsListener.onTombstoneLinkClicked(mURL, senderId);
                }
            } else {
                if (mPattern == MXPatterns.PATTERN_CONTAIN_MATRIX_USER_IDENTIFIER) {
                    if (null != mActionsListener) {
                        mActionsListener.onMatrixUserIdClick(mURL);
                    }
                } else if (mPattern == MXPatterns.PATTERN_CONTAIN_MATRIX_ALIAS) {
                    if (null != mActionsListener) {
                        mActionsListener.onRoomAliasClick(mURL);
                    }
                } else if (mPattern == MXPatterns.PATTERN_CONTAIN_MATRIX_ROOM_IDENTIFIER) {
                    if (null != mActionsListener) {
                        mActionsListener.onRoomIdClick(mURL);
                    }
                } else if (mPattern == MXPatterns.PATTERN_CONTAIN_MATRIX_EVENT_IDENTIFIER) {
                    if (null != mActionsListener) {
                        mActionsListener.onEventIdClick(mURL);
                    }
                } else if (mPattern == MXPatterns.PATTERN_CONTAIN_MATRIX_GROUP_IDENTIFIER) {
                    if (null != mActionsListener) {
                        mActionsListener.onGroupIdClick(mURL);
                    }
                } else {
                    // Plain URL: delegate to the listener, or open externally if none.
                    Uri uri = Uri.parse(getURL());
                    if (null != mActionsListener) {
                        mActionsListener.onURLClick(uri);
                    } else {
                        ExternalApplicationsUtilKt.openUrlInExternalBrowser(widget.getContext(), uri);
                    }
                }
            }
        } catch (Exception e) {
            Log.e(LOG_TAG, "MatrixURLSpan : on click failed " + e.getLocalizedMessage(), e);
        }
    }

    /**
     * Find the matrix spans i.e matrix id , user id ... to display them as URL.
     *
     * @param stringBuilder the text in which the matrix items has to be clickable.
     * @param mActionsListener listener attached to each created span; may be null
     */
    public static void refreshMatrixSpans(SpannableStringBuilder stringBuilder, IMessagesAdapterActionsListener mActionsListener) {
        // sanity checks
        if ((null == stringBuilder) || (0 == stringBuilder.length())) {
            return;
        }
        String text = stringBuilder.toString();
        for (int index = 0; index < MXPatterns.MATRIX_PATTERNS.size(); index++) {
            Pattern pattern = MXPatterns.MATRIX_PATTERNS.get(index);
            // room id.
            Matcher matcher = pattern.matcher(stringBuilder);
            while (matcher.find()) {
                try {
                    int startPos = matcher.start(0);
                    // Skip matches directly preceded by '/' — presumably identifiers
                    // already embedded in a permalink URL. TODO(review): confirm intent.
                    if ((startPos == 0) || (text.charAt(startPos - 1) != '/')) {
                        int endPos = matcher.end(0);
                        String url = text.substring(matcher.start(0), matcher.end(0));
                        stringBuilder.setSpan(new MatrixURLSpan(url, pattern, mActionsListener), startPos, endPos, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
                    }
                } catch (Exception e) {
                    Log.e(LOG_TAG, "refreshMatrixSpans " + e.getLocalizedMessage(), e);
                }
            }
        }
    }
}
| |
package br.gov.siscomex.portalunico.cct_ext.model;
import java.math.BigDecimal;
import javax.validation.Valid;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.swagger.annotations.ApiModelProperty;
/**
 * Cargo data transfer object for the Siscomex CCT export API.
 * Generated-style bean: every property exposes a getter, a setter and a
 * fluent setter returning {@code this} for chaining.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "CargaDTO", propOrder =
 { "numeroDue", "numeroRuc", "codigoTipoEmbalagem", "descricaoTipoEmbalagem", "codigoTipoGranel", "descricaoTipoGranel", "siglaUnidadeMedida", "quantidadeManifestada", "descricaoCarga", "pesoBruto"
})
@XmlRootElement(name="CargaDTO")
public class CargaDTO {

    @XmlElement(name="numeroDue")
    @ApiModelProperty(value = "")
    private String numeroDue = null;

    @XmlElement(name="numeroRuc")
    @ApiModelProperty(value = "")
    private String numeroRuc = null;

    @XmlElement(name="codigoTipoEmbalagem")
    @ApiModelProperty(value = "")
    private Integer codigoTipoEmbalagem = null;

    @XmlElement(name="descricaoTipoEmbalagem")
    @ApiModelProperty(value = "")
    private String descricaoTipoEmbalagem = null;

    @XmlElement(name="codigoTipoGranel")
    @ApiModelProperty(value = "")
    private Integer codigoTipoGranel = null;

    @XmlElement(name="descricaoTipoGranel")
    @ApiModelProperty(value = "")
    private String descricaoTipoGranel = null;

    @XmlElement(name="siglaUnidadeMedida")
    @ApiModelProperty(value = "")
    private String siglaUnidadeMedida = null;

    @XmlElement(name="quantidadeManifestada")
    @ApiModelProperty(value = "")
    @Valid
    private BigDecimal quantidadeManifestada = null;

    @XmlElement(name="descricaoCarga")
    @ApiModelProperty(value = "")
    private String descricaoCarga = null;

    @XmlElement(name="pesoBruto")
    @ApiModelProperty(value = "")
    @Valid
    private BigDecimal pesoBruto = null;

    /** @return numeroDue */
    @JsonProperty("numeroDue")
    public String getNumeroDue() {
        return this.numeroDue;
    }

    public void setNumeroDue(String numeroDue) {
        this.numeroDue = numeroDue;
    }

    /** Fluent variant of {@link #setNumeroDue(String)}. */
    public CargaDTO numeroDue(String numeroDue) {
        setNumeroDue(numeroDue);
        return this;
    }

    /** @return numeroRuc */
    @JsonProperty("numeroRuc")
    public String getNumeroRuc() {
        return this.numeroRuc;
    }

    public void setNumeroRuc(String numeroRuc) {
        this.numeroRuc = numeroRuc;
    }

    /** Fluent variant of {@link #setNumeroRuc(String)}. */
    public CargaDTO numeroRuc(String numeroRuc) {
        setNumeroRuc(numeroRuc);
        return this;
    }

    /** @return codigoTipoEmbalagem */
    @JsonProperty("codigoTipoEmbalagem")
    public Integer getCodigoTipoEmbalagem() {
        return this.codigoTipoEmbalagem;
    }

    public void setCodigoTipoEmbalagem(Integer codigoTipoEmbalagem) {
        this.codigoTipoEmbalagem = codigoTipoEmbalagem;
    }

    /** Fluent variant of {@link #setCodigoTipoEmbalagem(Integer)}. */
    public CargaDTO codigoTipoEmbalagem(Integer codigoTipoEmbalagem) {
        setCodigoTipoEmbalagem(codigoTipoEmbalagem);
        return this;
    }

    /** @return descricaoTipoEmbalagem */
    @JsonProperty("descricaoTipoEmbalagem")
    public String getDescricaoTipoEmbalagem() {
        return this.descricaoTipoEmbalagem;
    }

    public void setDescricaoTipoEmbalagem(String descricaoTipoEmbalagem) {
        this.descricaoTipoEmbalagem = descricaoTipoEmbalagem;
    }

    /** Fluent variant of {@link #setDescricaoTipoEmbalagem(String)}. */
    public CargaDTO descricaoTipoEmbalagem(String descricaoTipoEmbalagem) {
        setDescricaoTipoEmbalagem(descricaoTipoEmbalagem);
        return this;
    }

    /** @return codigoTipoGranel */
    @JsonProperty("codigoTipoGranel")
    public Integer getCodigoTipoGranel() {
        return this.codigoTipoGranel;
    }

    public void setCodigoTipoGranel(Integer codigoTipoGranel) {
        this.codigoTipoGranel = codigoTipoGranel;
    }

    /** Fluent variant of {@link #setCodigoTipoGranel(Integer)}. */
    public CargaDTO codigoTipoGranel(Integer codigoTipoGranel) {
        setCodigoTipoGranel(codigoTipoGranel);
        return this;
    }

    /** @return descricaoTipoGranel */
    @JsonProperty("descricaoTipoGranel")
    public String getDescricaoTipoGranel() {
        return this.descricaoTipoGranel;
    }

    public void setDescricaoTipoGranel(String descricaoTipoGranel) {
        this.descricaoTipoGranel = descricaoTipoGranel;
    }

    /** Fluent variant of {@link #setDescricaoTipoGranel(String)}. */
    public CargaDTO descricaoTipoGranel(String descricaoTipoGranel) {
        setDescricaoTipoGranel(descricaoTipoGranel);
        return this;
    }

    /** @return siglaUnidadeMedida */
    @JsonProperty("siglaUnidadeMedida")
    public String getSiglaUnidadeMedida() {
        return this.siglaUnidadeMedida;
    }

    public void setSiglaUnidadeMedida(String siglaUnidadeMedida) {
        this.siglaUnidadeMedida = siglaUnidadeMedida;
    }

    /** Fluent variant of {@link #setSiglaUnidadeMedida(String)}. */
    public CargaDTO siglaUnidadeMedida(String siglaUnidadeMedida) {
        setSiglaUnidadeMedida(siglaUnidadeMedida);
        return this;
    }

    /** @return quantidadeManifestada */
    @JsonProperty("quantidadeManifestada")
    public BigDecimal getQuantidadeManifestada() {
        return this.quantidadeManifestada;
    }

    public void setQuantidadeManifestada(BigDecimal quantidadeManifestada) {
        this.quantidadeManifestada = quantidadeManifestada;
    }

    /** Fluent variant of {@link #setQuantidadeManifestada(BigDecimal)}. */
    public CargaDTO quantidadeManifestada(BigDecimal quantidadeManifestada) {
        setQuantidadeManifestada(quantidadeManifestada);
        return this;
    }

    /** @return descricaoCarga */
    @JsonProperty("descricaoCarga")
    public String getDescricaoCarga() {
        return this.descricaoCarga;
    }

    public void setDescricaoCarga(String descricaoCarga) {
        this.descricaoCarga = descricaoCarga;
    }

    /** Fluent variant of {@link #setDescricaoCarga(String)}. */
    public CargaDTO descricaoCarga(String descricaoCarga) {
        setDescricaoCarga(descricaoCarga);
        return this;
    }

    /** @return pesoBruto */
    @JsonProperty("pesoBruto")
    public BigDecimal getPesoBruto() {
        return this.pesoBruto;
    }

    public void setPesoBruto(BigDecimal pesoBruto) {
        this.pesoBruto = pesoBruto;
    }

    /** Fluent variant of {@link #setPesoBruto(BigDecimal)}. */
    public CargaDTO pesoBruto(BigDecimal pesoBruto) {
        setPesoBruto(pesoBruto);
        return this;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("class CargaDTO {\n");
        appendField(sb, "numeroDue", numeroDue);
        appendField(sb, "numeroRuc", numeroRuc);
        appendField(sb, "codigoTipoEmbalagem", codigoTipoEmbalagem);
        appendField(sb, "descricaoTipoEmbalagem", descricaoTipoEmbalagem);
        appendField(sb, "codigoTipoGranel", codigoTipoGranel);
        appendField(sb, "descricaoTipoGranel", descricaoTipoGranel);
        appendField(sb, "siglaUnidadeMedida", siglaUnidadeMedida);
        appendField(sb, "quantidadeManifestada", quantidadeManifestada);
        appendField(sb, "descricaoCarga", descricaoCarga);
        appendField(sb, "pesoBruto", pesoBruto);
        sb.append("}");
        return sb.toString();
    }

    /** Appends one " name: value\n" line to the builder. */
    private static void appendField(StringBuilder sb, String name, java.lang.Object value) {
        sb.append(" ").append(name).append(": ").append(toIndentedString(value)).append("\n");
    }

    /**
     * Convert the given object to string with each line indented by 4 spaces
     * (except the first line).
     */
    private static String toIndentedString(java.lang.Object o) {
        return o == null ? "null" : o.toString().replace("\n", "\n ");
    }
}
| |
package org.apache.cordova.firebase;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.media.RingtoneManager;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.NotificationCompat;
import android.util.Log;
import android.app.Notification;
import android.text.TextUtils;
import android.content.ContentResolver;
import android.graphics.Color;
import com.google.firebase.messaging.FirebaseMessagingService;
import com.google.firebase.messaging.RemoteMessage;
import java.util.List;
import java.util.Map;
import java.util.Random;
/**
 * FirebaseMessagingService that forwards FCM messages to registered receivers
 * and, when appropriate, posts a local notification or forwards the payload to
 * the foreground JS layer.
 *
 * Fixes:
 * - malformed "lights" payloads are now logged instead of silently swallowed;
 * - a missing "accent" color resource no longer crashes with
 *   Resources.NotFoundException (getIdentifier() returns 0 in that case).
 */
public class FirebasePluginMessagingService extends FirebaseMessagingService {

    private static final String TAG = "FirebasePlugin";

    /**
     * Get a string from resources without importing the .R package
     *
     * @param name Resource Name
     * @return Resource
     */
    private String getStringResource(String name) {
        return this.getString(
                this.getResources().getIdentifier(
                        name, "string", this.getPackageName()
                )
        );
    }

    /**
     * Called when message is received.
     *
     * @param remoteMessage Object representing the message received from Firebase Cloud Messaging.
     */
    @Override
    public void onMessageReceived(RemoteMessage remoteMessage) {
        // [START_EXCLUDE]
        // There are two types of messages data messages and notification messages. Data messages are handled
        // here in onMessageReceived whether the app is in the foreground or background. Data messages are the type
        // traditionally used with GCM. Notification messages are only received here in onMessageReceived when the app
        // is in the foreground. When the app is in the background an automatically generated notification is displayed.
        // When the user taps on the notification they are returned to the app. Messages containing both notification
        // and data payloads are treated as notification messages. The Firebase console always sends notification
        // messages. For more see: https://firebase.google.com/docs/cloud-messaging/concept-options
        // [END_EXCLUDE]

        // Pass the message to the receiver manager so any registered receivers can decide to handle it
        boolean wasHandled = FirebasePluginMessageReceiverManager.onMessageReceived(remoteMessage);
        if (wasHandled) {
            Log.d(TAG, "Message was handled by a registered receiver");
            // Don't process the message in this method.
            return;
        }

        String title = "";
        String text = "";
        String id = "";
        String sound = "";
        String lights = "";
        Map<String, String> data = remoteMessage.getData();

        if (remoteMessage.getNotification() != null) {
            // Notification message: fields come from the notification block.
            title = remoteMessage.getNotification().getTitle();
            text = remoteMessage.getNotification().getBody();
            id = remoteMessage.getMessageId();
        } else if (data != null) {
            // Data message: fields come from custom key/value pairs.
            title = data.get("title");
            text = data.get("text");
            id = data.get("id");
            sound = data.get("sound");
            lights = data.get("lights"); //String containing hex ARGB color, miliseconds on, miliseconds off, example: '#FFFF00FF,1000,3000'
            if (TextUtils.isEmpty(text)) {
                text = data.get("body");
            }
        }

        if (TextUtils.isEmpty(id)) {
            // No id supplied: generate a small random one so notify() has a tag.
            Random rand = new Random();
            int n = rand.nextInt(50) + 1;
            id = Integer.toString(n);
        }

        Log.d(TAG, "From: " + remoteMessage.getFrom());
        Log.d(TAG, "Notification Message id: " + id);
        Log.d(TAG, "Notification Message Title: " + title);
        Log.d(TAG, "Notification Message Body/Text: " + text);
        Log.d(TAG, "Notification Message Sound: " + sound);
        Log.d(TAG, "Notification Message Lights: " + lights);

        // TODO: Add option to developer to configure if show notification when app on foreground
        if (!TextUtils.isEmpty(text) || !TextUtils.isEmpty(title) || (data != null && !data.isEmpty())) {
            // Only show a system notification when the app cannot handle it in JS
            // (background or no callback) and there is something displayable.
            boolean showNotification = (FirebasePlugin.inBackground() || !FirebasePlugin.hasNotificationsCallback()) && (!TextUtils.isEmpty(text) || !TextUtils.isEmpty(title));
            sendNotification(id, title, text, data, showNotification, sound, lights);
        }
    }

    /**
     * Either posts a system notification (showNotification == true) or forwards
     * the payload bundle to the plugin's JS callback.
     *
     * @param id               notification id/tag source (hashCode is used for notify())
     * @param title            notification title
     * @param messageBody      notification body text
     * @param data             raw data payload copied into the intent extras
     * @param showNotification whether to post a system notification
     * @param sound            optional raw resource name for a custom sound
     * @param lights           optional "#AARRGGBB,onMs,offMs" LED specification
     */
    private void sendNotification(String id, String title, String messageBody, Map<String, String> data, boolean showNotification, String sound, String lights) {
        Bundle bundle = new Bundle();
        for (String key : data.keySet()) {
            bundle.putString(key, data.get(key));
        }
        if (showNotification) {
            Intent intent = new Intent(this, OnNotificationOpenReceiver.class);
            intent.putExtras(bundle);
            // NOTE(review): targeting Android 12+ requires FLAG_IMMUTABLE or
            // FLAG_MUTABLE to be OR-ed in here — confirm against the app's targetSdk.
            PendingIntent pendingIntent = PendingIntent.getBroadcast(this, id.hashCode(), intent, PendingIntent.FLAG_UPDATE_CURRENT);
            String channelId = this.getStringResource("default_notification_channel_id");
            String channelName = this.getStringResource("default_notification_channel_name");
            Uri defaultSoundUri = RingtoneManager.getDefaultUri(RingtoneManager.TYPE_NOTIFICATION);
            NotificationCompat.Builder notificationBuilder = new NotificationCompat.Builder(this, channelId);
            notificationBuilder
                    .setContentTitle(title)
                    .setContentText(messageBody)
                    .setVisibility(NotificationCompat.VISIBILITY_PUBLIC)
                    .setStyle(new NotificationCompat.BigTextStyle().bigText(messageBody))
                    .setAutoCancel(true)
                    .setSound(defaultSoundUri)
                    .setContentIntent(pendingIntent)
                    .setPriority(NotificationCompat.PRIORITY_MAX);

            // Prefer a dedicated small icon if the app ships one; fall back to the app icon.
            int resID = getResources().getIdentifier("notification_icon", "drawable", getPackageName());
            if (resID != 0) {
                notificationBuilder.setSmallIcon(resID);
            } else {
                notificationBuilder.setSmallIcon(getApplicationInfo().icon);
            }

            if (sound != null) {
                Log.d(TAG, "sound before path is: " + sound);
                Uri soundPath = Uri.parse(ContentResolver.SCHEME_ANDROID_RESOURCE + "://" + getPackageName() + "/raw/" + sound);
                Log.d(TAG, "Parsed sound is: " + soundPath.toString());
                notificationBuilder.setSound(soundPath);
            } else {
                Log.d(TAG, "Sound was null ");
            }

            int lightArgb = 0;
            if (lights != null) {
                try {
                    String[] lightsComponents = lights.replaceAll("\\s", "").split(",");
                    if (lightsComponents.length == 3) {
                        lightArgb = Color.parseColor(lightsComponents[0]);
                        int lightOnMs = Integer.parseInt(lightsComponents[1]);
                        int lightOffMs = Integer.parseInt(lightsComponents[2]);
                        notificationBuilder.setLights(lightArgb, lightOnMs, lightOffMs);
                    }
                } catch (Exception e) {
                    // Fix: previously swallowed silently. A malformed "lights" value is
                    // worth logging, but must not prevent the notification from showing.
                    Log.w(TAG, "Failed to parse lights parameter: " + lights, e);
                }
            }

            if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.M) {
                int accentID = getResources().getIdentifier("accent", "color", getPackageName());
                // Fix: getIdentifier() returns 0 when the "accent" color resource does
                // not exist; getColor(0, null) would throw Resources.NotFoundException.
                if (accentID != 0) {
                    notificationBuilder.setColor(getResources().getColor(accentID, null));
                }
            }

            Notification notification = notificationBuilder.build();

            if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
                int iconID = android.R.id.icon;
                int notiID = getResources().getIdentifier("notification_big", "drawable", getPackageName());
                if (notification.contentView != null) {
                    notification.contentView.setImageViewResource(iconID, notiID);
                }
            }

            NotificationManager notificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);

            // Since android Oreo notification channel is needed.
            if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.O) {
                List<NotificationChannel> channels = notificationManager.getNotificationChannels();
                boolean channelExists = false;
                for (int i = 0; i < channels.size(); i++) {
                    if (channelId.equals(channels.get(i).getId())) {
                        channelExists = true;
                        break; // no need to scan the remaining channels
                    }
                }
                if (!channelExists) {
                    NotificationChannel channel = new NotificationChannel(channelId, channelName, NotificationManager.IMPORTANCE_HIGH);
                    channel.enableLights(true);
                    channel.enableVibration(true);
                    channel.setShowBadge(true);
                    if (lights != null) {
                        channel.setLightColor(lightArgb);
                    }
                    notificationManager.createNotificationChannel(channel);
                }
            }
            notificationManager.notify(id.hashCode(), notification);
        } else {
            // Foreground with an active JS callback: hand the payload over instead.
            bundle.putBoolean("tap", false);
            bundle.putString("title", title);
            bundle.putString("body", messageBody);
            FirebasePlugin.sendNotification(bundle, this.getApplicationContext());
        }
    }
}
| |
package frostillicus;
import java.util.*;
import java.io.Serializable;
import com.ibm.xsp.extlib.component.dynamicview.UIDynamicViewPanel;
import com.ibm.xsp.model.*;
import lombok.Delegate;
import lotus.domino.*;
@SuppressWarnings("unchecked")
public class SortableMapView extends TabularDataModel implements Serializable, TabularDataSource, List<Map<String, Comparable>> {
private static final long serialVersionUID = 3475977562822554265L;
UIDynamicViewPanel foo;
// May as well make it List-compatible
private interface MapList extends List<Map<String, Comparable>> { }
private interface MapListExcludes { public boolean add(Map<String, Comparable> element); }
@Delegate(types=MapList.class, excludes=MapListExcludes.class)
private List<Map<String, Comparable>> data = new ArrayList<Map<String, Comparable>>();
private List<Map<String, Comparable>> originalData = null;
private String sortColumn = "";
private String sortOrder = "";
// No-arg constructor (e.g. for managed-bean use); rows are added later via add().
public SortableMapView() { super(); }
/**
 * Builds the model from every entry of the given view, recycling the
 * entry collection when done (the view itself is not recycled).
 */
public SortableMapView(View view) throws NotesException {
    ViewEntryCollection entries = view.getAllEntries();
    processEntryCollection(entries);
    entries.recycle();
}
/**
 * Builds the model from an existing entry collection. The caller retains
 * ownership of the collection; note processEntryCollection() recycles it.
 */
public SortableMapView(ViewEntryCollection entries) throws NotesException {
    processEntryCollection(entries);
}
/**
 * Builds the model from a DocumentCollection, extracting the named items from
 * each document. Column entries may be "programmaticName|ItemName"; the part
 * after '|' is used as the item name. Single numbers and single date/time
 * values are stored as Comparable Double/Date; everything else as text.
 *
 * Fixes:
 * - the NUMBERS case was missing a {@code break}, so the stored numeric value
 *   was immediately overwritten by the default (text) case;
 * - an item whose date-value list is empty no longer throws
 *   IndexOutOfBoundsException (stored as "" like a missing item).
 */
public SortableMapView(DocumentCollection docs, List<String> columnNames) throws NotesException {
    Document doc = docs.getFirstDocument();
    while(doc != null) {
        Map<String, Comparable> result = new HashMap<String, Comparable>();
        for(String columnInfo : columnNames) {
            int index = columnInfo.indexOf("|");
            String columnName = index > -1 ? columnInfo.substring(index+1) : columnInfo;
            Item item = doc.getFirstItem(columnName);
            if(item == null) {
                result.put(columnName, "");
            } else {
                // Look at the item's type to see if it's suitable for Comparable use
                // For now, this means single numbers or dates - otherwise, it's boiled down to a string
                switch(item.getType()) {
                case Item.DATETIMES:
                    List<DateTime> dtValues = item.getValueDateTimeArray();
                    if(dtValues.isEmpty()) {
                        // Defensive: empty value list, treat like a missing item.
                        result.put(columnName, "");
                    } else if(dtValues.size() > 1) {
                        // Multi-value dates are not cleanly comparable - fall back to text.
                        result.put(columnName, item.getText());
                        for(DateTime dt : dtValues) { dt.recycle(); }
                    } else {
                        DateTime dt = dtValues.get(0);
                        result.put(columnName, dt.toJavaDate());
                        dt.recycle();
                    }
                    break;
                case Item.NUMBERS:
                    List<Double> doubleValues = item.getValues();
                    if(doubleValues.size() > 1) {
                        result.put(columnName, item.getText());
                    } else {
                        result.put(columnName, item.getValueDouble());
                    }
                    // Fix: break was missing, falling through to default and
                    // overwriting the numeric value with its text form.
                    break;
                default:
                    result.put(columnName, item.getText());
                    break;
                }
                item.recycle();
            }
        }
        result.put("documentId", doc.getUniversalID());
        this.add(result);
        // Fetch the next document before recycling the current one.
        Document tempDoc = doc;
        doc = docs.getNextDocument();
        tempDoc.recycle();
    }
}
/**
 * Adds a row to the model, wrapping it in a FakeEntryData so view panels can
 * treat it as view-entry-like row data.
 * NOTE(review): this overrides List.add with wrapping behavior — callers that
 * expect to get the exact same Map instance back should verify FakeEntryData's
 * contract (defined elsewhere in this file).
 */
public boolean add(Map<String, Comparable> element) {
    return this.data.add(new FakeEntryData(element));
}
/**
 * Adds a row built from a view entry's column values, keyed by the
 * programmatic column names in columnNameMap. The entry's universal ID is
 * stored under "universalID".
 * NOTE(review): a null column value would NPE on toString() here — confirm
 * whether Domino can return null elements from getColumnValues().
 */
public boolean add(ViewEntry entry, Map<Integer, String> columnNameMap) throws NotesException {
    Map<String, Comparable> result = new HashMap<String, Comparable>();
    result.put("universalID", entry.getUniversalID());
    // Ask for java.util.Date instead of DateTime so values are Comparable.
    entry.setPreferJavaDates(true);
    List<Object> columnValues = entry.getColumnValues();
    for(int i = 0; i < columnValues.size(); i++) {
        if(columnValues.get(i) instanceof Comparable) {
            result.put(columnNameMap.get(i), (Comparable)columnValues.get(i));
        } else {
            // Non-comparable values (e.g. multi-value lists) are stored as text.
            result.put(columnNameMap.get(i), columnValues.get(i).toString());
        }
    }
    return this.add(result);
}
/**
 * Returns the column names derived from the CURRENT row's key set; the model
 * cursor must therefore point at a valid row. Key order follows the backing
 * map's iteration order (HashMap: unspecified).
 */
public List<String> getColumns() {
    Map<String, Comparable> currentRow = (Map<String, Comparable>)this.getRowData();
    return new ArrayList<String>(currentRow.keySet());
}
// Number of rows currently held by the model.
@Override
public int getRowCount() { return this.data.size(); }

// Row map at the model's current cursor position.
@Override
public Object getRowData() { return this.data.get(this.getRowIndex()); }

// Every column in this model is sortable.
@Override
public boolean isColumnSortable(String paramString) { return true; }

// All columns support both ascending and descending resort.
@Override
public int getResortType(String paramString) { return TabularDataModel.RESORT_BOTH; }
/**
 * Sorts the backing data by the given column. "toggle" cycles
 * ascending → descending → original order; explicit "ascending"/"descending"
 * are applied directly.
 *
 * <p>Fix: when switching to a new column with an explicit "descending"
 * request, the recorded state said "descending" but the data was always
 * sorted ascending. The sort now follows the recorded order.
 */
@Override
public void setResortOrder(String columnName, String sortOrder) {
    // Lazily snapshot the original order so "no sort" can restore it.
    if(this.originalData == null) {
        this.originalData = new ArrayList<Map<String, Comparable>>(this.data);
    }
    if(!columnName.equals(this.sortColumn)) {
        // Switching columns: honor an explicit "descending" request,
        // otherwise (including "toggle") default back to ascending.
        this.sortOrder = sortOrder.equals("descending") ? "descending" : "ascending";
        Collections.sort(this.data, new MapComparator(columnName, !this.sortOrder.equals("descending")));
        this.sortColumn = columnName;
    } else {
        this.sortColumn = columnName;
        if(sortOrder.equals("ascending") || (sortOrder.equals("toggle") && this.sortOrder.length() == 0)) {
            this.sortOrder = "ascending";
            Collections.sort(this.data, new MapComparator(columnName, true));
        } else if(sortOrder.equals("descending") || (sortOrder.equals("toggle") && this.sortOrder.equals("ascending"))) {
            this.sortOrder = "descending";
            Collections.sort(this.data, new MapComparator(columnName, false));
        } else {
            // Third toggle state: drop the sort and restore the original order.
            this.sortOrder = "";
            this.data = new ArrayList<Map<String, Comparable>>(this.originalData);
        }
    }
}
/**
 * Reports the model's current sort direction as one of the
 * {@code TabularDataModel.RESORT_*} constants.
 */
@Override
public int getResortState(String paramString) {
    if (this.sortOrder.equals("ascending")) {
        return TabularDataModel.RESORT_ASCENDING;
    }
    if (this.sortOrder.equals("descending")) {
        return TabularDataModel.RESORT_DESCENDING;
    }
    return TabularDataModel.RESORT_NONE;
}
/** @return the programmatic name of the column the model is currently sorted by */
@Override
public String getResortColumn() {
    return this.sortColumn;
}
/**
 * Walks a ViewEntryCollection and adds one row per entry, using the parent
 * view's column metadata to map column-value indexes to item names.
 * Recycles each entry as it advances, and the collection itself at the end,
 * per the Domino back-end object lifecycle rules.
 *
 * @param entries the collection to import; recycled before returning
 * @throws NotesException on any back-end API failure
 */
private void processEntryCollection(ViewEntryCollection entries) throws NotesException {
View view = entries.getParent();
Map<Integer, String> columnNameMap = new HashMap<Integer, String>();
for(ViewColumn column : (List<ViewColumn>)view.getColumns()) {
// 65535 marks columns with no entry in the column-values array; skip those
if(column.getColumnValuesIndex() < 65535) {
columnNameMap.put(column.getColumnValuesIndex(), column.getItemName());
}
}
ViewEntry entry = entries.getFirstEntry();
while(entry != null) {
this.add(entry, columnNameMap);
// Fetch the next entry BEFORE recycling the current one: getNextEntry
// needs a live reference to advance from
ViewEntry tempEntry = entry;
entry = entries.getNextEntry(entry);
tempEntry.recycle();
}
entries.recycle();
}
// View Panels know how to deal with ViewRowData better than Maps, apparently, so just pass through
// the ViewRowData methods to their Map equivalents
private class FakeEntryData extends HashMap<String, Comparable> implements ViewRowData {
private static final long serialVersionUID = 5946100397649532083L;
private String universalID;
public FakeEntryData() { super(); }
public FakeEntryData(Map<String, Comparable> original) { super(original); }
public Object getColumnValue(String arg0) { return this.get(arg0); }
public Object getValue(String arg0) { return this.get(arg0); }
public ColumnInfo getColumnInfo(String arg0) { return null; }
public String getOpenPageURL(String arg0, boolean arg1) { return null; }
public boolean isReadOnly(String arg0) { return false; }
public String getUniversalID() { return this.universalID; }
public void setUniversalID(String universalID) { this.universalID = universalID; }
public void setColumnValue(String arg0, Object arg1) {
if(!(arg1 instanceof Comparable)) {
this.put(arg0, arg1.toString());
} else {
this.put(arg0, (Comparable)arg1);
}
}
}
// A basic class to compare two Maps by a given comparable key common in each,
// allowing for descending order
private class MapComparator implements Comparator<Map<String, Comparable>> {
private String key;
private boolean ascending;
public MapComparator(String key, boolean ascending) {
this.key = key;
this.ascending = ascending;
}
public int compare(Map<String, Comparable> o1, Map<String, Comparable> o2) {
return (ascending ? 1 : -1) * o1.get(key).compareTo(o2.get(key));
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.datamigration.models;
import com.azure.core.annotation.Immutable;
import com.azure.core.util.logging.ClientLogger;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import java.time.OffsetDateTime;
import java.util.List;
/**
 * The MigrateSqlServerSqlMITaskOutputMigrationLevel model.
 *
 * <p>Code generated by Microsoft (R) AutoRest Code Generator — do not hand-edit;
 * regenerate instead. Every property is WRITE_ONLY on the wire, i.e. populated by
 * the service during deserialization and exposed here through read-only accessors.
 */
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "resultType")
@JsonTypeName("MigrationLevelOutput")
@Immutable
public final class MigrateSqlServerSqlMITaskOutputMigrationLevel extends MigrateSqlServerSqlMITaskOutput {
// Generator-supplied logger; not referenced by the generated members below
@JsonIgnore
private final ClientLogger logger = new ClientLogger(MigrateSqlServerSqlMITaskOutputMigrationLevel.class);
/*
 * Migration start time
 */
@JsonProperty(value = "startedOn", access = JsonProperty.Access.WRITE_ONLY)
private OffsetDateTime startedOn;
/*
 * Migration end time
 */
@JsonProperty(value = "endedOn", access = JsonProperty.Access.WRITE_ONLY)
private OffsetDateTime endedOn;
/*
 * Current status of migration
 */
@JsonProperty(value = "status", access = JsonProperty.Access.WRITE_ONLY)
private MigrationStatus status;
/*
 * Current state of migration
 */
@JsonProperty(value = "state", access = JsonProperty.Access.WRITE_ONLY)
private MigrationState state;
/*
 * Selected agent jobs as a map from name to id (delivered by the service as
 * a raw string)
 */
@JsonProperty(value = "agentJobs", access = JsonProperty.Access.WRITE_ONLY)
private String agentJobs;
/*
 * Selected logins as a map from name to id (delivered by the service as a
 * raw string)
 */
@JsonProperty(value = "logins", access = JsonProperty.Access.WRITE_ONLY)
private String logins;
/*
 * Migration progress message
 */
@JsonProperty(value = "message", access = JsonProperty.Access.WRITE_ONLY)
private String message;
/*
 * Map of server role migration results (delivered by the service as a raw
 * string)
 */
@JsonProperty(value = "serverRoleResults", access = JsonProperty.Access.WRITE_ONLY)
private String serverRoleResults;
/*
 * List of orphaned users.
 */
@JsonProperty(value = "orphanedUsersInfo", access = JsonProperty.Access.WRITE_ONLY)
private List<OrphanedUserInfo> orphanedUsersInfo;
/*
 * Selected databases as a map from database name to database id (delivered
 * by the service as a raw string)
 */
@JsonProperty(value = "databases", access = JsonProperty.Access.WRITE_ONLY)
private String databases;
/*
 * Source server version
 */
@JsonProperty(value = "sourceServerVersion", access = JsonProperty.Access.WRITE_ONLY)
private String sourceServerVersion;
/*
 * Source server brand version
 */
@JsonProperty(value = "sourceServerBrandVersion", access = JsonProperty.Access.WRITE_ONLY)
private String sourceServerBrandVersion;
/*
 * Target server version
 */
@JsonProperty(value = "targetServerVersion", access = JsonProperty.Access.WRITE_ONLY)
private String targetServerVersion;
/*
 * Target server brand version
 */
@JsonProperty(value = "targetServerBrandVersion", access = JsonProperty.Access.WRITE_ONLY)
private String targetServerBrandVersion;
/*
 * Migration exceptions and warnings.
 */
@JsonProperty(value = "exceptionsAndWarnings", access = JsonProperty.Access.WRITE_ONLY)
private List<ReportableException> exceptionsAndWarnings;
/**
 * Get the startedOn property: Migration start time.
 *
 * @return the startedOn value.
 */
public OffsetDateTime startedOn() {
return this.startedOn;
}
/**
 * Get the endedOn property: Migration end time.
 *
 * @return the endedOn value.
 */
public OffsetDateTime endedOn() {
return this.endedOn;
}
/**
 * Get the status property: Current status of migration.
 *
 * @return the status value.
 */
public MigrationStatus status() {
return this.status;
}
/**
 * Get the state property: Current state of migration.
 *
 * @return the state value.
 */
public MigrationState state() {
return this.state;
}
/**
 * Get the agentJobs property: Selected agent jobs as a map from name to id.
 *
 * @return the agentJobs value.
 */
public String agentJobs() {
return this.agentJobs;
}
/**
 * Get the logins property: Selected logins as a map from name to id.
 *
 * @return the logins value.
 */
public String logins() {
return this.logins;
}
/**
 * Get the message property: Migration progress message.
 *
 * @return the message value.
 */
public String message() {
return this.message;
}
/**
 * Get the serverRoleResults property: Map of server role migration results.
 *
 * @return the serverRoleResults value.
 */
public String serverRoleResults() {
return this.serverRoleResults;
}
/**
 * Get the orphanedUsersInfo property: List of orphaned users.
 *
 * @return the orphanedUsersInfo value.
 */
public List<OrphanedUserInfo> orphanedUsersInfo() {
return this.orphanedUsersInfo;
}
/**
 * Get the databases property: Selected databases as a map from database name to database id.
 *
 * @return the databases value.
 */
public String databases() {
return this.databases;
}
/**
 * Get the sourceServerVersion property: Source server version.
 *
 * @return the sourceServerVersion value.
 */
public String sourceServerVersion() {
return this.sourceServerVersion;
}
/**
 * Get the sourceServerBrandVersion property: Source server brand version.
 *
 * @return the sourceServerBrandVersion value.
 */
public String sourceServerBrandVersion() {
return this.sourceServerBrandVersion;
}
/**
 * Get the targetServerVersion property: Target server version.
 *
 * @return the targetServerVersion value.
 */
public String targetServerVersion() {
return this.targetServerVersion;
}
/**
 * Get the targetServerBrandVersion property: Target server brand version.
 *
 * @return the targetServerBrandVersion value.
 */
public String targetServerBrandVersion() {
return this.targetServerBrandVersion;
}
/**
 * Get the exceptionsAndWarnings property: Migration exceptions and warnings.
 *
 * @return the exceptionsAndWarnings value.
 */
public List<ReportableException> exceptionsAndWarnings() {
return this.exceptionsAndWarnings;
}
/**
 * Validates the instance.
 *
 * @throws IllegalArgumentException thrown if the instance is not valid.
 */
@Override
public void validate() {
super.validate();
if (orphanedUsersInfo() != null) {
orphanedUsersInfo().forEach(e -> e.validate());
}
if (exceptionsAndWarnings() != null) {
exceptionsAndWarnings().forEach(e -> e.validate());
}
}
}
| |
/* JAI-Ext - OpenSource Java Advanced Image Extensions Library
* http://www.geo-solutions.it/
* Copyright 2014 GeoSolutions
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.geosolutions.jaiext.stats;
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import it.geosolutions.jaiext.range.Range;
import it.geosolutions.jaiext.range.RangeFactory;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * This test-class verifies that the Statistics object is able to compute the statistics correctly. For achieving this purpose, all the subclasses of
 * the {@link Statistics} abstract class are tested. The first 3 tests compares the ability to calculate statistics of the subclasses. Then is checked
 * the ability to accumulate data previously calculated, by calling the method accumulateStats(). Also is tested the capability of clearing the stored
 * data. The last 7 tests evaluates if the subclasses are capable to throw an exception when they call the accumulateStats() method with another
 * Statistics object different from their type.
 */
public class StatisticsTest {
/** Dimension of the random samples array */
private final static int ARRAY_DIMENSIONS = 100;
/** Tolerance value used for comparison between double */
private final static double TOLERANCE = 0.1d;
/** Random samples array */
private static double[] testArray;
/** Object used for calculating the mean */
private static Statistics meanObj;
/** Object used for calculating the sum */
private static Statistics sumObj;
/** Object used for calculating the maximum */
private static Statistics maxObj;
/** Object used for calculating the minimum */
private static Statistics minObj;
/** Object used for calculating the extrema */
private static Statistics extremaObj;
/** Object used for calculating the variance */
private static Statistics varianceObj;
/** Object used for calculating the standard deviation */
private static Statistics devstdObj;
/** Object used for calculating the histogram */
private static Statistics histogramObj;
/** Object used for calculating the mode */
private static Statistics modeObj;
/** Object used for calculating the median */
private static Statistics medianObj;
/** Minimum bound for complex statistics */
private static double minBound;
/** Maximum bound for complex statistics */
private static double maxBound;
/** Bin size for complex statistics */
private static double binInterval;
/** Bin number for complex statistics */
private static int numBins;
/** Values interval for complex statistics */
private static Range interval;
@BeforeClass
public static void initialSetup() {
// Creation of an array with random values (each sample falls in [2, 4), see below)
testArray = new double[ARRAY_DIMENSIONS];
// Definition of the Histogram parameters
minBound = -3;
maxBound = 3;
numBins = 20;
binInterval = (maxBound - minBound) / numBins;
// Half-open interval [minBound, maxBound) used to filter samples for the complex statistics;
// with samples in [2, 4) roughly half of them fall inside it
interval = RangeFactory.create(minBound, true, maxBound, false, false);
for (int i = 0; i < ARRAY_DIMENSIONS; i++) {
testArray[i] = Math.random() * 2 + 2;
}
// Creation of the Statistics Object.
// The int codes select the statistic type: 0=mean, 1=sum, 2=max, 3=min, 4=extrema,
// 5=variance, 6=devstd, 7=histogram, 8=mode, 9=median
// (NOTE(review): ordering assumed from the field names below — confirm against StatsFactory)
meanObj = StatsFactory.createSimpleStatisticsObjectFromInt(0);
sumObj = StatsFactory.createSimpleStatisticsObjectFromInt(1);
maxObj = StatsFactory.createSimpleStatisticsObjectFromInt(2);
minObj = StatsFactory.createSimpleStatisticsObjectFromInt(3);
extremaObj = StatsFactory.createSimpleStatisticsObjectFromInt(4);
varianceObj = StatsFactory.createSimpleStatisticsObjectFromInt(5);
devstdObj = StatsFactory.createSimpleStatisticsObjectFromInt(6);
histogramObj = StatsFactory.createComplexStatisticsObjectFromInt(7, minBound, maxBound,
numBins);
modeObj = StatsFactory.createComplexStatisticsObjectFromInt(8, minBound, maxBound, numBins);
medianObj = StatsFactory.createComplexStatisticsObjectFromInt(9, minBound, maxBound,
numBins);
}
// Private method for calculating the bin/index related to the sample.
// No bounds clamping is done: callers must guard with interval.contains(sample)
// or the returned index may fall outside [0, numBins).
private int getIndex(double sample) {
int index = (int) ((sample - minBound) / binInterval);
return index;
}
// This test is used for checking if the mean and sum objects
// have a correct behavior
@Test
public void testMeanAndSum() {
double mean = 0;
double sum = calculateSumMean(sumObj, meanObj);
// Comparison
double sum2 = (Double) (sumObj.getResult());
assertEquals(sum, sum2, TOLERANCE);
// NOTE(review): divides by N-1 rather than N; this mirrors the library's mean
// implementation (see also testCumulativeStats) — testMeanCorrectness below
// checks the mean against an implementation-independent expected value
mean = sum / (ARRAY_DIMENSIONS - 1);
double mean2 = (Double) (meanObj.getResult());
assertEquals(mean, mean2, TOLERANCE);
}
// tests mean not using a "static" (ie. not computed by the class under test)
// as the expected value
@Test
public void testMeanCorrectness() {
MeanSum mean = new MeanSum(false);
mean.addSample(2);
mean.addSample(2);
assertEquals(2d, (Double)(mean.getResult()), TOLERANCE);
}
// This test is used for checking if the min, max and extrema objects
// have a correct behavior
@Test
public void testMinMaxExtrema() {
double[] minMax = calculateMaxMinExtrema(minObj, maxObj, extremaObj);
double min = minMax[0];
double max = minMax[1];
// Comparison: the extrema result is a two-element array {min, max}
double[] array = (double[]) (extremaObj.getResult());
double max2 = array[1];
double min2 = array[0];
assertEquals(min, min2, TOLERANCE);
assertEquals(max, max2, TOLERANCE);
double min3 = (Double) (minObj.getResult());
assertEquals(min, min3, TOLERANCE);
double max3 = (Double) (maxObj.getResult());
assertEquals(max, max3, TOLERANCE);
}
// This test is used for checking if the variance and devStd objects
// have a correct behavior
@Test
public void testDevStdVariance() {
double mean = 0;
double variance = 0;
double std = 0;
double sum = calculateVarianceAndStd(varianceObj, devstdObj);
// NOTE(review): mean uses the same N-1 divisor as testMeanAndSum
mean = sum / (ARRAY_DIMENSIONS - 1);
double sum2 = 0;
for (int i = 0; i < ARRAY_DIMENSIONS; i++) {
sum2 += Math.pow((testArray[i] - mean), 2);
}
// Comparison: sample variance (N-1 divisor) and its square root
variance = sum2 / (ARRAY_DIMENSIONS - 1);
double variance2 = (Double) (varianceObj.getResult());
assertEquals(variance, variance2, TOLERANCE);
std = Math.sqrt(variance);
double std2 = (Double) (devstdObj.getResult());
assertEquals(std, std2, TOLERANCE);
}
// This test is used for checking if the histogram,mode and median objects
// have a correct behavior
@Test
public void testHistModeMedian() {
double[] hist = new double[numBins];
List<Double> listData = new ArrayList<Double>();
double median = 0;
// Feed every sample to the objects, but count only samples inside the
// configured interval toward the expected histogram/median
for (int i = 0; i < ARRAY_DIMENSIONS; i++) {
if (interval.contains(testArray[i])) {
int index = getIndex(testArray[i]);
hist[index]++;
listData.add(testArray[i]);
}
modeObj.addSample(testArray[i]);
histogramObj.addSample(testArray[i]);
medianObj.addSample(testArray[i]);
}
// Selection of the median.
// NOTE(review): for an even-size list this averages the elements at indices
// n/2 and n/2+1 (the textbook median uses n/2-1 and n/2); this matches the
// implementation within TOLERANCE for these dense samples — confirm against Median
Collections.sort(listData);
int listSize = listData.size();
if (listSize == 0) {
median = Double.NaN;
} else if (listSize == 1) {
median = listData.get(0);
} else {
int halfSize = listSize / 2;
double halfValue = listData.get(halfSize);
if (listData.size() % 2 == 1) {
median = halfValue;
} else {
median = (halfValue + listData.get(halfSize + 1)) / 2;
}
}
// Selection of the mode
double max = Double.NEGATIVE_INFINITY;
double indexMax = 0;
for (int i = 0; i < numBins; i++) {
if (hist[i] > max) {
max = hist[i];
indexMax = i;
}
}
// NOTE(review): the expected mode is the winning bin index offset by minBound,
// without binInterval scaling — presumably the result space of the Mode
// implementation; confirm
if (max != 0) {
indexMax = indexMax + minBound;
}
// Comparison
double indexMax2 = (Double) (modeObj.getResult());
assertEquals(indexMax, indexMax2, TOLERANCE);
double[] hist2 = (double[]) (histogramObj.getResult());
for (int i = 0; i < numBins; i++) {
assertEquals(hist[i], hist2[i], TOLERANCE);
}
double median2 = (Double) (medianObj.getResult());
assertEquals(median, median2, TOLERANCE);
}
// This test is used for checking if the cumulation of the statistics continue to mantain
// correct results
@Test
public void testCumulativeStats() {
// Calculation of the statistics
Statistics oldMeanObj = StatsFactory.createMeanObject();
Statistics oldSumObj = StatsFactory.createSumObject();
Statistics oldMaxObj = StatsFactory.createMaxObject();
Statistics oldMinObj = StatsFactory.createMinObject();
Statistics oldExtremaObj = StatsFactory.createExtremaObject();
Statistics oldVarianceObj = StatsFactory.createVarianceObject();
Statistics oldDevStdObj = StatsFactory.createDevStdObject();
// Calculate sum and mean
calculateSumMean(oldSumObj, oldMeanObj);
// Calculate Max and Min and Extrema
calculateMaxMinExtrema(oldMinObj, oldMaxObj, oldExtremaObj);
// Calculate Variance and Std
calculateVarianceAndStd(oldVarianceObj, oldDevStdObj);
// Addition of dummy data: each fresh object receives the single sample "1",
// then accumulates the statistics computed above, so the expected values
// below are those of the 101-sample set {testArray..., 1}
Statistics newMeanObj = StatsFactory.createMeanObject();
newMeanObj.addSample(1);
Statistics newSumObj = StatsFactory.createSumObject();
newSumObj.addSample(1);
Statistics newMaxObj = StatsFactory.createMaxObject();
newMaxObj.addSample(1);
Statistics newMinObj = StatsFactory.createMinObject();
newMinObj.addSample(1);
Statistics newExtremaObj = StatsFactory.createExtremaObject();
newExtremaObj.addSample(1);
Statistics newVarianceObj = StatsFactory.createVarianceObject();
newVarianceObj.addSample(1);
Statistics newDevStdObj = StatsFactory.createDevStdObject();
newDevStdObj.addSample(1);
// Statistics accumulation
newMeanObj.accumulateStats(oldMeanObj);
newSumObj.accumulateStats(oldSumObj);
newMaxObj.accumulateStats(oldMaxObj);
newMinObj.accumulateStats(oldMinObj);
newExtremaObj.accumulateStats(oldExtremaObj);
newVarianceObj.accumulateStats(oldVarianceObj);
newDevStdObj.accumulateStats(oldDevStdObj);
// Storage of the updated statistics
double newMeanUpdated = (Double) (newMeanObj.getResult());
double newSumUpdated = (Double) (newSumObj.getResult());
double newMaxUpdated = (Double) (newMaxObj.getResult());
double newMinUpdated = (Double) (newMinObj.getResult());
double[] newExtrema = (double[]) (newExtremaObj.getResult());
double newExmin = newExtrema[0];
double newExmax = newExtrema[1];
double newVarianceUpdated = (Double) (newVarianceObj.getResult());
double newStdUpdated = (Double) (newDevStdObj.getResult());
// New calculation of the statistics
double sum = 0;
double min = Double.POSITIVE_INFINITY;
double max = Double.NEGATIVE_INFINITY;
for (int i = 0; i < ARRAY_DIMENSIONS; i++) {
sum += testArray[i];
if (testArray[i] > max) {
max = testArray[i];
}
if (testArray[i] < min) {
min = testArray[i];
}
}
// NOTE(review): with 101 samples the divisor ARRAY_DIMENSIONS equals
// (sample count - 1), consistent with the N-1 mean in testMeanAndSum
double meanCalc = (sum + 1) / ARRAY_DIMENSIONS;
double sumCalc = (sum + 1);
double maxCalc = 1 > max ? 1 : max;
double minCalc = 1 < min ? 1 : min;
double varianceCalc = 0;
double stdCalc = 0;
double sum2 = 0;
for (int i = 0; i < ARRAY_DIMENSIONS; i++) {
sum2 += Math.pow((testArray[i] - meanCalc), 2);
}
sum2 += Math.pow((1 - meanCalc), 2);
// Sample variance of the 101 samples: divisor 101-1 == ARRAY_DIMENSIONS
varianceCalc = sum2 / (ARRAY_DIMENSIONS);
stdCalc = Math.sqrt(varianceCalc);
// Comparison
assertEquals(meanCalc, newMeanUpdated, TOLERANCE);
assertEquals(sumCalc, newSumUpdated, TOLERANCE);
assertEquals(maxCalc, newMaxUpdated, TOLERANCE);
assertEquals(minCalc, newMinUpdated, TOLERANCE);
assertEquals(maxCalc, newExmax, TOLERANCE);
assertEquals(minCalc, newExmin, TOLERANCE);
assertEquals(varianceCalc, newVarianceUpdated, TOLERANCE);
assertEquals(stdCalc, newStdUpdated, TOLERANCE);
}
// This test is used for checking if the statistics are correctly cleared
@Test
public void testEmptyStats() {
// Addition of dummy data
Statistics newMeanObj = StatsFactory.createMeanObject();
newMeanObj.addSample(1);
Statistics newSumObj = StatsFactory.createSumObject();
newSumObj.addSample(1);
Statistics newMaxObj = StatsFactory.createMaxObject();
newMaxObj.addSample(1);
Statistics newMinObj = StatsFactory.createMinObject();
newMinObj.addSample(1);
Statistics newExtremaObj = StatsFactory.createExtremaObject();
newExtremaObj.addSample(1);
Statistics newVarianceObj = StatsFactory.createVarianceObject();
newVarianceObj.addSample(1);
Statistics newDevStdObj = StatsFactory.createDevStdObject();
newDevStdObj.addSample(1);
Statistics newHistObj = StatsFactory.createHistogramObject(numBins, minBound, maxBound);
newHistObj.addSample(1);
Statistics newModeObj = StatsFactory.createModeObject(numBins, minBound, maxBound);
newModeObj.addSample(1);
Statistics newMedianObj = StatsFactory.createMedianObject(minBound, maxBound);
newMedianObj.addSample(1);
// Clearing of the statistics
newMeanObj.clearStats();
newSumObj.clearStats();
newMaxObj.clearStats();
newMinObj.clearStats();
newExtremaObj.clearStats();
newVarianceObj.clearStats();
newDevStdObj.clearStats();
newHistObj.clearStats();
newModeObj.clearStats();
newMedianObj.clearStats();
// Storage of the cleared statistics
double newMeanUpdated = (Double) (newMeanObj.getResult());
double newSumUpdated = (Double) (newSumObj.getResult());
double newMaxUpdated = (Double) (newMaxObj.getResult());
double newMinUpdated = (Double) (newMinObj.getResult());
double[] newExtrema = (double[]) (newExtremaObj.getResult());
double newExmin = newExtrema[0];
double newExmax = newExtrema[1];
double newVarianceUpdated = (Double) (newVarianceObj.getResult());
double newStdUpdated = (Double) (newDevStdObj.getResult());
double[] newHistUpdated = (double[]) (newHistObj.getResult());
double newModeUpdated = (Double) (newModeObj.getResult());
double newMedianUpdated = (Double) (newMedianObj.getResult());
// Comparison against the expected cleared states: 0 for sum/mean/mode/histogram,
// infinities for max/min/extrema, NaN for variance/devstd/median
// (assertEquals treats two NaN arguments as equal)
assertEquals(0, newMeanUpdated, TOLERANCE);
assertEquals(0, newSumUpdated, TOLERANCE);
assertEquals(Double.NEGATIVE_INFINITY, newMaxUpdated, TOLERANCE);
assertEquals(Double.POSITIVE_INFINITY, newMinUpdated, TOLERANCE);
assertEquals(Double.NEGATIVE_INFINITY, newExmax, TOLERANCE);
assertEquals(Double.POSITIVE_INFINITY, newExmin, TOLERANCE);
assertEquals(Double.NaN, newVarianceUpdated, TOLERANCE);
assertEquals(Double.NaN, newStdUpdated, TOLERANCE);
for (int i = 0; i < numBins; i++) {
assertEquals(0, newHistUpdated[i], TOLERANCE);
}
assertEquals(0, newModeUpdated, TOLERANCE);
assertEquals(Double.NaN, newMedianUpdated, TOLERANCE);
}
/*
 * These tests are used for checking if the accumulateStats() method returns an exception when the given statistical object does not belong to the
 * same StatsType of the receiver or if it is not supported
 */
@Test(expected = IllegalArgumentException.class)
public void testMeanException() {
meanObj.accumulateStats(sumObj);
}
@Test(expected = IllegalArgumentException.class)
public void testSumException() {
sumObj.accumulateStats(meanObj);
}
@Test(expected = IllegalArgumentException.class)
public void testMaxException() {
maxObj.accumulateStats(sumObj);
}
@Test(expected = IllegalArgumentException.class)
public void testMinException() {
minObj.accumulateStats(sumObj);
}
@Test(expected = IllegalArgumentException.class)
public void testExtremaException() {
extremaObj.accumulateStats(sumObj);
}
@Test(expected = IllegalArgumentException.class)
public void testVarianceException() {
varianceObj.accumulateStats(sumObj);
}
@Test(expected = IllegalArgumentException.class)
public void testDevStdException() {
devstdObj.accumulateStats(sumObj);
}
@Test(expected = UnsupportedOperationException.class)
public void testHistException() {
histogramObj.accumulateStats(sumObj);
}
@Test(expected = UnsupportedOperationException.class)
public void testModeException() {
modeObj.accumulateStats(sumObj);
}
@Test(expected = UnsupportedOperationException.class)
public void testMedianException() {
medianObj.accumulateStats(sumObj);
}
// Feeds every sample to both statistic objects and returns the plain sum of the array
private double calculateSumMean(Statistics sumObj, Statistics meanObj) {
double sum = 0;
// Mean and sum calculation
for (int i = 0; i < ARRAY_DIMENSIONS; i++) {
sum += testArray[i];
sumObj.addSample(testArray[i]);
meanObj.addSample(testArray[i]);
}
return sum;
}
// Feeds every sample to the three statistic objects and returns {min, max}
private double[] calculateMaxMinExtrema(Statistics minObj, Statistics maxObj, Statistics extremaObj) {
double[] minMax = new double[]{Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY};
// Maximum and minimum calculation
for (int i = 0; i < ARRAY_DIMENSIONS; i++) {
if (testArray[i] > minMax[1]) {
minMax[1] = testArray[i];
}
if (testArray[i] < minMax[0]) {
minMax[0] = testArray[i];
}
minObj.addSample(testArray[i]);
maxObj.addSample(testArray[i]);
extremaObj.addSample(testArray[i]);
}
return minMax;
}
// Feeds every sample to both statistic objects and returns the plain sum of the array
private double calculateVarianceAndStd(Statistics varianceObj, Statistics devstdObj) {
double sum = 0;
// Variance and standard deviation calculation
for (int i = 0; i < ARRAY_DIMENSIONS; i++) {
sum += testArray[i];
varianceObj.addSample(testArray[i]);
devstdObj.addSample(testArray[i]);
}
return sum;
}
}
| |
package mil.dds.anet.test.integration.db;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.fail;
import static org.junit.jupiter.api.Assumptions.assumeTrue;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.graphql_java_generator.exception.GraphQLRequestExecutionException;
import com.graphql_java_generator.exception.GraphQLRequestPreparationException;
import io.dropwizard.testing.junit5.DropwizardAppExtension;
import java.io.IOException;
import java.time.Duration;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.List;
import mil.dds.anet.AnetObjectEngine;
import mil.dds.anet.beans.ApprovalStep;
import mil.dds.anet.beans.ApprovalStep.ApprovalStepType;
import mil.dds.anet.beans.Organization;
import mil.dds.anet.beans.Person;
import mil.dds.anet.beans.Report;
import mil.dds.anet.beans.Report.ReportState;
import mil.dds.anet.beans.ReportPerson;
import mil.dds.anet.config.AnetConfiguration;
import mil.dds.anet.database.EmailDao;
import mil.dds.anet.database.ReportDao;
import mil.dds.anet.test.client.util.MutationExecutor;
import mil.dds.anet.test.integration.config.AnetTestConfiguration;
import mil.dds.anet.test.integration.utils.EmailResponse;
import mil.dds.anet.test.integration.utils.FakeSmtpServer;
import mil.dds.anet.test.integration.utils.TestApp;
import mil.dds.anet.test.integration.utils.TestBeans;
import mil.dds.anet.test.resources.AbstractResourceTest;
import mil.dds.anet.threads.AnetEmailWorker;
import mil.dds.anet.threads.FutureEngagementWorker;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
@ExtendWith(TestApp.class)
public class FutureEngagementWorkerTest extends AbstractResourceTest {
// Email-name markers that the teardown email check is expected to find on the fake SMTP server
private final static List<String> expectedIds = new ArrayList<>();
// Email-name markers that must NOT appear on the fake SMTP server
private final static List<String> unexpectedIds = new ArrayList<>();
// Worker under test: moves past-due approved engagements back to draft
private static FutureEngagementWorker futureEngagementWorker;
private static FakeSmtpServer emailServer;
private static AnetEmailWorker emailWorker;
// From test configuration: when false, the email-server assertions are skipped
private static boolean executeEmailServerTests;
// "@<domain>" suffix built from the ANET dictionary's domainNames —
// presumably used to filter recipient addresses; confirm in the email checks
private static String allowedEmail;
/**
 * One-time setup: requires SMTP to be enabled in the app configuration, wires up
 * the email worker, the worker under test and the fake SMTP server, then flushes
 * any reports/emails left over from previous tests so counts start at zero.
 */
@BeforeAll
@SuppressWarnings("unchecked")
public static void setUpClass() throws Exception {
final DropwizardAppExtension<AnetConfiguration> app = TestApp.app;
if (app.getConfiguration().getSmtp().isDisabled()) {
fail("'ANET_SMTP_DISABLE' system environment variable must have value 'false' to run test.");
}
executeEmailServerTests = Boolean.parseBoolean(
AnetTestConfiguration.getConfiguration().get("emailServerTestsExecute").toString());
// First configured domain name becomes the allowed recipient suffix
allowedEmail =
"@" + ((List<String>) app.getConfiguration().getDictionaryEntry("domainNames")).get(0);
final AnetObjectEngine engine = AnetObjectEngine.getInstance();
emailWorker = new AnetEmailWorker(app.getConfiguration(), engine.getEmailDao());
futureEngagementWorker =
new FutureEngagementWorker(app.getConfiguration(), engine.getReportDao());
emailServer = new FakeSmtpServer(app.getConfiguration().getSmtp());
// Flush all reports from previous tests
futureEngagementWorker.run();
// Flush all emails from previous tests
emailWorker.run();
// Clear the email server before starting testing
emailServer.clearEmailServer();
}
/**
 * One-time teardown: verifies the accumulated expected/unexpected emails against
 * the fake SMTP server, then clears the server and detaches the email worker.
 * NOTE(review): assertions raised in @AfterAll are reported against the class,
 * not the individual test that queued the email — confirm this is intended.
 */
@AfterAll
public static void tearDownClass() throws Exception {
// Test that all emails have been correctly sent
testFutureEngagementWorkerEmail();
// Clear the email server after testing
emailServer.clearEmailServer();
emailWorker = null;
AnetEmailWorker.setInstance(null);
}
/** With no eligible reports in the database, the worker must not process anything. */
@Test
public void testNoReports() {
testFutureEngagementWorker(0);
}
/**
 * Three eligible engagements must all be picked up by the worker, generating one
 * author email each, and every report must end up back in draft state.
 */
@Test
public void reportsOK() {
    final AnetObjectEngine engine = AnetObjectEngine.getInstance();
    final List<Report> reports = new ArrayList<>();
    for (final String id : new String[] {"reportsOK_1", "reportsOK_2", "reportsOK_3"}) {
        final Report report = createTestReport(id, true, true, true);
        engine.getReportDao().update(report);
        reports.add(report);
        expectedIds.add(id);
    }
    testFutureEngagementWorker(3);
    // Reports should be draft now
    for (final Report report : reports) {
        testReportDraft(report.getUuid());
    }
}
/**
 * A report whose engagement date still lies in the future must be left alone by
 * the worker, and no email may be sent for it.
 */
@Test
public void testReportDueInFuture() {
    final AnetObjectEngine engine = AnetObjectEngine.getInstance();
    final Report futureReport = createTestReport("testReportDueInFuture_1", true, true, true);
    // Push the engagement two days ahead so the worker has to skip it
    futureReport.setEngagementDate(Instant.now().plus(Duration.ofDays(2L)));
    engine.getReportDao().update(futureReport);
    unexpectedIds.add("testReportDueInFuture_1");
    testFutureEngagementWorker(0);
}
/**
 * A report whose engagement date is right now counts as due: the worker must
 * process it, email the author, and move the report back to draft.
 */
@Test
public void testReportDueEndToday() {
    final AnetObjectEngine engine = AnetObjectEngine.getInstance();
    final Report dueReport = createTestReport("testReportDueEndToday_1", true, true, true);
    dueReport.setEngagementDate(Instant.now());
    engine.getReportDao().update(dueReport);
    expectedIds.add("testReportDueEndToday_1");
    testFutureEngagementWorker(1);
    // Report should be draft now
    testReportDraft(dueReport.getUuid());
}
/**
 * Regression test for GH-3304: after the worker re-drafts a past engagement and
 * the author re-submits it, a second worker run must NOT draft it again — the
 * report has to stay in PENDING_APPROVAL.
 */
@Test
public void testGH3304()
throws GraphQLRequestExecutionException, GraphQLRequestPreparationException {
// Create a draft report
final AnetObjectEngine engine = AnetObjectEngine.getInstance();
final ReportDao reportDao = engine.getReportDao();
final Person author = getRegularUserBean();
final MutationExecutor authorMutationExecutor = getMutationExecutor(author.getDomainUsername());
final ReportPerson advisor = personToPrimaryReportAuthor(author);
final ReportPerson principal = personToPrimaryReportPerson(getSteveStevesonBean());
final Report draftReport = reportDao.insert(TestBeans.getTestReport("testGH3304",
getFutureDate(), null, Lists.newArrayList(advisor, principal)));
// Submit the report
authorMutationExecutor.submitReport("", draftReport.getUuid());
// This planned report gets approved automatically
final Report submittedReport = testReportState(draftReport.getUuid(), ReportState.APPROVED);
// Nothing should happen
testFutureEngagementWorker(0);
// Move the engagementDate from future to past to simulate time passing
submittedReport.setEngagementDate(getPastDate());
reportDao.update(submittedReport);
// State shouldn't have changed
final Report updatedReport = testReportState(submittedReport.getUuid(), ReportState.APPROVED);
// Report is no longer planned, so this should update it
testFutureEngagementWorker(1);
// This should send an email to the author
expectedIds.add("hunter+erin");
// State should be DRAFT now
final Report redraftedReport = testReportDraft(updatedReport.getUuid());
// Submit the report
authorMutationExecutor.submitReport("", redraftedReport.getUuid());
// This should send an email to the approver
expectedIds.add("hunter+jacob");
// State should be PENDING_APPROVAL
final Report resubmittedReport =
testReportState(redraftedReport.getUuid(), ReportState.PENDING_APPROVAL);
// Prior to the fix for GH-3304, it changed the report back to DRAFT and sent an email
testFutureEngagementWorker(0);
// State shouldn't have changed
testReportState(resubmittedReport.getUuid(), ReportState.PENDING_APPROVAL);
}
@Test
public void testReportApprovalStates() {
// Only reports waiting in a planning-approval step should be picked up by the worker
checkApprovalStepType(ApprovalStepType.PLANNING_APPROVAL, true, "1");
checkApprovalStepType(ApprovalStepType.REPORT_APPROVAL, false, "2");
}
/**
 * Runs the worker against a report sitting in an approval step of the given type and checks
 * whether an email is sent and the report re-drafted (expected only for planning approval).
 */
private void checkApprovalStepType(final ApprovalStepType type, final boolean isFuture,
final String id) {
final String fullId = "checkApprovalStepType_" + id;
// Track whether the author's address should (not) receive an email
if (isFuture) {
expectedIds.add(fullId);
} else {
unexpectedIds.add(fullId);
}
final Report report = createTestReport(fullId, false, false, isFuture);
final ApprovalStep approvalStep = createApprovalStep(type);
report.setApprovalStep(approvalStep);
report.setAdvisorOrgUuid(approvalStep.getRelatedObjectUuid());
AnetObjectEngine.getInstance().getReportDao().update(report);
// Only a planning-approval report with a past engagement is re-drafted (one email)
testFutureEngagementWorker(isFuture ? 1 : 0);
if (isFuture) {
// The worker should have moved the report back to draft
testReportDraft(report.getUuid());
}
}
@Test
public void testReportStates() {
// States eligible for re-drafting by the worker
checkReportState(ReportState.APPROVED, true, "APPROVED");
checkReportState(ReportState.PENDING_APPROVAL, true, "PENDING_APPROVAL");
checkReportState(ReportState.PUBLISHED, true, "PUBLISHED");
checkReportState(ReportState.REJECTED, true, "REJECTED");
// States the worker must leave alone
checkReportState(ReportState.CANCELLED, false, "CANCELLED");
checkReportState(ReportState.DRAFT, false, "DRAFT");
}
/**
 * Runs the worker against a past-engagement report forced into the given state and checks
 * whether an email is sent and the report re-drafted.
 */
private void checkReportState(final ReportState state, final boolean isExpected,
final String id) {
// Bug fix: the prefix was copy-pasted from checkApprovalStepType, which made the generated
// ids/addresses misleading; use this method's own prefix.
final String fullId = "checkReportState_" + id;
if (isExpected) {
expectedIds.add(fullId);
} else {
unexpectedIds.add(fullId);
}
final AnetObjectEngine engine = AnetObjectEngine.getInstance();
final Report report = createTestReport(fullId, true, true, true);
report.setState(state);
engine.getReportDao().update(report);
// One email expected only for eligible states
testFutureEngagementWorker(isExpected ? 1 : 0);
if (isExpected) {
// Report should be draft now
testReportDraft(report.getUuid());
}
}
@Test
public void testApprovalStepReport() {
final AnetObjectEngine objectEngine = AnetObjectEngine.getInstance();
final ReportDao reportDao = objectEngine.getReportDao();
final Report report = createTestReport("testApprovalStepReport_1", true, true, true);
expectedIds.add("testApprovalStepReport_1");
// The overdue engagement gets the report re-drafted (and one email queued)
testFutureEngagementWorker(1);
testReportDraft(report.getUuid());
// Attach a regular (non-planning) approval step and resubmit the report
final ApprovalStep approvalStep = createApprovalStep(ApprovalStepType.REPORT_APPROVAL);
report.setApprovalStep(approvalStep);
report.setAdvisorOrgUuid(approvalStep.getRelatedObjectUuid());
reportDao.update(report);
reportDao.submit(report, report.loadAuthors(objectEngine.getContext()).join().get(0));
testReportState(report.getUuid(), ReportState.PENDING_APPROVAL);
// A report pending regular approval must not be re-drafted again
testFutureEngagementWorker(0);
testReportState(report.getUuid(), ReportState.PENDING_APPROVAL);
}
@Test
public void testPlanningApprovalStepReport() {
// A submitted (not yet approved) report stuck in a planning step, now overdue
final Report report = createTestReport("testPlanningApprovalStepReport_1", true, false, true);
expectedIds.add("testPlanningApprovalStepReport_1");
testFutureEngagementWorker(1);
// The worker should have moved the report back to draft
testReportDraft(report.getUuid());
}
@Test
public void testAutomaticallyApprovedReport() {
// Report approved automatically (no planning workflow attached), now overdue
final Report report = createTestReport("testAutomaticallyApprovedReport_1", false, true, true);
expectedIds.add("testAutomaticallyApprovedReport_1");
testFutureEngagementWorker(1);
// The worker should have moved the report back to draft
testReportDraft(report.getUuid());
}
@Test
public void testPublishedReport() {
// Even a published report with an overdue engagement gets re-drafted
final Report report = createPublishedTestReport("testPublishedReport_1");
expectedIds.add("testPublishedReport_1");
testFutureEngagementWorker(1);
testReportDraft(report.getUuid());
}
// Convenience wrapper: asserts the report identified by uuid is back in DRAFT state.
private Report testReportDraft(final String uuid) {
return testReportState(uuid, ReportState.DRAFT);
}
// Reloads the report from the database, asserts its state, and returns the fresh copy.
private Report testReportState(final String uuid, final ReportState state) {
final Report reloaded = AnetObjectEngine.getInstance().getReportDao().getByUuid(uuid);
assertThat(reloaded.getState()).isEqualTo(state);
return reloaded;
}
// DB integration
// Runs the worker once and asserts exactly expectedCount new emails were queued in the DB.
private void testFutureEngagementWorker(final int expectedCount) {
final EmailDao emailDao = AnetObjectEngine.getInstance().getEmailDao();
final int queuedBefore = emailDao.getAll().size();
futureEngagementWorker.run();
assertThat(emailDao.getAll().size()).isEqualTo(queuedBefore + expectedCount);
}
// Email integration
// Flushes the asynchronous email queue, then checks the fake SMTP server received exactly
// the expected recipients (local part of the address) and none of the unexpected ones.
private static void testFutureEngagementWorkerEmail() throws IOException, InterruptedException {
assumeTrue(executeEmailServerTests, "Email server tests configured to be skipped.");
// Make sure all messages have been (asynchronously) sent
emailWorker.run();
final List<EmailResponse> messages = emailServer.requestAllEmailsFromServer();
assertThat(messages.size()).isEqualTo(expectedIds.size());
messages.forEach(m -> assertThat(expectedIds).contains(m.to.text.split("@")[0]));
messages.forEach(m -> assertThat(unexpectedIds).doesNotContain(m.to.text.split("@")[0]));
}
// Moves the report's engagement into the past so the worker will consider it due.
private void setPastDate(final Report report) {
report.setEngagementDate(getPastDate());
AnetObjectEngine.getInstance().getReportDao().update(report);
}
/**
 * Creates and submits a test report with a future engagement date.
 *
 * @param toAddressId unique id used to build the author's email address, so emails can be
 *        matched back to this report
 * @param addApprovalStep whether to attach a planning-approval step
 * @param approve whether to approve the report (only meaningful with an approval step)
 * @param setPastDate whether to move the engagement into the past afterwards
 */
private Report createTestReport(final String toAddressId, final boolean addApprovalStep,
final boolean approve, final boolean setPastDate) {
final AnetObjectEngine objectEngine = AnetObjectEngine.getInstance();
final ReportDao reportDao = objectEngine.getReportDao();
// Give the author a unique, recognizable address
final ReportPerson reportAuthor = personToReportAuthor(TestBeans.getTestPerson());
reportAuthor.setEmailAddress(toAddressId + allowedEmail);
objectEngine.getPersonDao().insert(reportAuthor);
final ApprovalStep planningStep =
addApprovalStep ? createApprovalStep(ApprovalStepType.PLANNING_APPROVAL) : null;
final Report report = reportDao.insert(TestBeans.getTestReport(toAddressId, getFutureDate(),
planningStep, ImmutableList.of(reportAuthor)));
// Submit this report
reportDao.submit(report, reportAuthor);
if (approve && planningStep != null) {
// Approve this report
reportDao.approve(report, null, planningStep);
}
if (setPastDate) {
setPastDate(report);
}
return report;
}
// Creates an approval step of the given type, backed by a freshly inserted test organization.
private ApprovalStep createApprovalStep(final ApprovalStepType type) {
final AnetObjectEngine objectEngine = AnetObjectEngine.getInstance();
final Organization org = TestBeans.getTestOrganization();
objectEngine.getOrganizationDao().insert(org);
final ApprovalStep step = TestBeans.getTestApprovalStep(org);
step.setType(type);
objectEngine.getApprovalStepDao().insertAtEnd(step);
return step;
}
// Creates an approved report, publishes it (which queues one publication email),
// then moves its engagement into the past so the worker will pick it up.
private Report createPublishedTestReport(final String toAddressId) {
final AnetObjectEngine objectEngine = AnetObjectEngine.getInstance();
final EmailDao emailDao = objectEngine.getEmailDao();
final Report report = createTestReport(toAddressId, true, true, false);
// Publish this report and verify exactly one email was queued for it
final int queuedBefore = emailDao.getAll().size();
objectEngine.getReportDao().publish(report, null);
assertThat(emailDao.getAll().size()).isEqualTo(queuedBefore + 1);
expectedIds.add(toAddressId);
setPastDate(report);
return report;
}
// One hour from now: far enough ahead to count as a future engagement.
private Instant getFutureDate() {
return Instant.now().plus(Duration.ofHours(1));
}
// One hour ago: a recently past engagement.
private Instant getPastDate() {
return Instant.now().minus(Duration.ofHours(1));
}
}
| |
package li.earth.urchin.twic.sarcoptic;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.FieldVisitor;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
/**
 * Generates, defines and caches concrete implementation classes for {@link Struct}
 * sub-interfaces using ASM. Extends {@link ClassLoader} so it can call the protected
 * {@code defineClass} on the bytes it emits; every generated class is therefore defined
 * by this loader.
 */
class StructImplementationRegistry extends ClassLoader {
// process-wide singleton instance; private constructor prevents others
public static final StructImplementationRegistry INSTANCE = new StructImplementationRegistry();
// one generated implementation class per interface; ConcurrentHashMap makes lookups thread-safe
private final Map<Class<?>, Class<?>> cache = new ConcurrentHashMap<>();
private StructImplementationRegistry() {}
/**
 * Returns the generated implementation class for the given struct interface,
 * generating and defining it on first request.
 *
 * @throws IllegalArgumentException if {@code type} is not an interface
 * @throws ClassCastException if {@code type} is not a proper subtype of {@code Struct}
 */
public <T extends Struct<T>> Class<? extends T> getImplementation(Class<T> type) {
return cache.computeIfAbsent(type, t -> makeImplementation(check(t).asSubclass(Struct.class))).asSubclass(type);
}
// Validates that the requested type is a non-null interface and a proper subtype of Struct.
private Class<?> check(Class<?> ifaceType) {
if (ifaceType == null) throw new NullPointerException();
if (!ifaceType.isInterface()) throw new IllegalArgumentException(ifaceType + " is not an interface");
if (ifaceType.equals(Struct.class) || !Struct.class.isAssignableFrom(ifaceType)) {
throw new ClassCastException(ifaceType + " is not a proper subtype of Struct");
}
return ifaceType;
}
// Builds the implementation class: one field + accessor + wither per declared interface
// method, plus a binding constructor and a nullary constructor.
private <T extends Struct<T>> Class<? extends T> makeImplementation(Class<T> ifaceType) {
String implClassName = ifaceType.getName() + "Impl";
// Property name -> return type, sorted by name for a deterministic field/constructor order.
// Overloaded method names would collide and are rejected (see noMerges).
// NOTE(review): assumes every method declared directly on the interface is a property
// accessor — TODO confirm against Struct's contract (e.g. default/static methods).
Map<String, Class<?>> properties = Arrays.stream(ifaceType.getDeclaredMethods())
.sorted(Comparator.comparing(Method::getName))
.collect(Collectors.toMap(Method::getName,
Method::getReturnType,
this::noMerges,
LinkedHashMap::new));
ClassWriter classWriter = makeClass(implClassName, StructImpl.class, ifaceType);
makeBindingConstructor(classWriter, implClassName, StructImpl.class, properties);
makeNullaryConstructor(classWriter, implClassName, StructImpl.class, properties);
for (Map.Entry<String, Class<?>> property : properties.entrySet()) {
makeProperty(classWriter, implClassName, property.getKey(), property.getValue(), properties);
}
byte[] implClassBytes = classWriter.toByteArray();
// define the generated class in this loader and narrow it to the interface type
return defineClass(implClassName, implClassBytes, 0, implClassBytes.length).asSubclass(ifaceType);
}
// Merge function for toMap that rejects duplicate keys (i.e. overloaded property names).
private <T> T noMerges(T a, T b) {
throw new IllegalStateException();
}
// Starts a public class extending baseType and implementing ifaceType.
// COMPUTE_FRAMES makes ASM compute stack-map frames (and max stack/locals) itself,
// which is why the visitMaxs(0, 0) calls below are fine.
private ClassWriter makeClass(String implClassName, Class<?> baseType, Class<?> ifaceType) {
ClassWriter classWriter = new ClassWriter(ClassWriter.COMPUTE_FRAMES);
classWriter.visit(Opcodes.V1_8,
Opcodes.ACC_SUPER | Opcodes.ACC_PUBLIC,
ClassFileUtils.binaryName(implClassName),
signature(baseType, ifaceType),
ClassFileUtils.binaryName(baseType),
new String[]{ClassFileUtils.binaryName(ifaceType)});
return classWriter;
}
// Emits a public constructor taking one parameter per property (in map order) and
// assigning each to its field. Assigning final fields via PUTFIELD is legal inside <init>.
private void makeBindingConstructor(ClassWriter classWriter,
String implClassName,
Class<StructImpl> baseType,
Map<String, Class<?>> properties) {
MethodVisitor ctor = classWriter.visitMethod(Opcodes.ACC_PUBLIC,
"<init>",
constructorDescriptor(properties.values()),
null,
null);
ctor.visitCode();
// super()
ctor.visitVarInsn(Opcodes.ALOAD, 0);
ctor.visitMethodInsn(Opcodes.INVOKESPECIAL, ClassFileUtils.binaryName(baseType), "<init>", "()V", false);
// local slot 0 is 'this'; parameters start at slot 1
int index = 1;
for (Map.Entry<String, Class<?>> property : properties.entrySet()) {
String propName = property.getKey();
Class<?> propType = property.getValue();
Kind propKind = Kind.of(propType);
ctor.visitVarInsn(Opcodes.ALOAD, 0);
ctor.visitVarInsn(propKind.loadOpcode, index);
ctor.visitFieldInsn(Opcodes.PUTFIELD,
ClassFileUtils.binaryName(implClassName),
propName,
propKind.descriptor(propType));
// long/double occupy two local slots, so advance by the type's size
index += Type.getType(propType).getSize();
}
ctor.visitInsn(Opcodes.RETURN);
ctor.visitMaxs(0, 0);
ctor.visitEnd();
}
// Emits a public no-arg constructor that pushes a zero/null constant for every property
// and delegates to the binding constructor above.
private void makeNullaryConstructor(ClassWriter classWriter,
String implClassName,
Class<StructImpl> baseType,
Map<String, Class<?>> properties) {
MethodVisitor ctor = classWriter.visitMethod(Opcodes.ACC_PUBLIC, "<init>", "()V", null, null);
ctor.visitCode();
ctor.visitVarInsn(Opcodes.ALOAD, 0);
for (Map.Entry<String, Class<?>> property : properties.entrySet()) {
Class<?> propType = property.getValue();
Kind propKind = Kind.of(propType);
// default value for this property's kind (e.g. ICONST_0, ACONST_NULL)
ctor.visitInsn(propKind.zeroOpcode);
}
ctor.visitMethodInsn(Opcodes.INVOKESPECIAL,
ClassFileUtils.binaryName(implClassName),
"<init>",
constructorDescriptor(properties.values()),
false);
ctor.visitInsn(Opcodes.RETURN);
ctor.visitMaxs(0, 0);
ctor.visitEnd();
}
// Builds the "(<param descriptors>)V" method descriptor for the binding constructor.
private String constructorDescriptor(Collection<Class<?>> parameterTypes) {
return parameterTypes.stream()
.map(c -> Kind.of(c).descriptor(c))
.collect(Collectors.joining("", "(", ")V"));
}
// Emits the field, accessor and wither for one property.
private void makeProperty(ClassWriter classWriter,
String implClassName,
String propName,
Class<?> propType,
Map<String, Class<?>> properties) {
Kind propKind = Kind.of(propType);
String propDescriptor = propKind.descriptor(propType);
makePropertyField(classWriter, propName, propDescriptor);
makePropertyAccessor(classWriter, implClassName, propName, propKind, propDescriptor);
makePropertySetter(implClassName, classWriter, propName, propKind, propDescriptor, properties);
}
// A private final backing field, named exactly like the property.
private void makePropertyField(ClassWriter classWriter, String propName, String propDescriptor) {
FieldVisitor field = classWriter.visitField(Opcodes.ACC_PRIVATE | Opcodes.ACC_FINAL,
propName,
propDescriptor,
null,
null);
field.visitEnd();
}
// A no-arg accessor named like the property that returns the field value
// (implements the corresponding interface method).
private void makePropertyAccessor(ClassWriter classWriter,
String implClassName,
String propName,
Kind propKind,
String propDescriptor) {
MethodVisitor accessor = classWriter.visitMethod(Opcodes.ACC_PUBLIC,
propName,
"()" + propDescriptor,
null,
null);
accessor.visitCode();
accessor.visitVarInsn(Opcodes.ALOAD, 0);
accessor.visitFieldInsn(Opcodes.GETFIELD, ClassFileUtils.binaryName(implClassName), propName, propDescriptor);
accessor.visitInsn(propKind.returnOpcode);
accessor.visitMaxs(0, 0);
accessor.visitEnd();
}
// A "withX(T)" wither: constructs and returns a NEW instance via the binding constructor,
// copying every other field from 'this' and substituting the argument for property propName
// (the fields are final, so updates are modeled as copies).
private void makePropertySetter(String implClassName,
ClassWriter classWriter,
String propName,
Kind propKind,
String propDescriptor,
Map<String, Class<?>> properties) {
String desc = "(" + propDescriptor + ")L" + ClassFileUtils.binaryName(implClassName) + ";";
MethodVisitor setter = classWriter.visitMethod(Opcodes.ACC_PUBLIC,
"with" + capitalise(propName),
desc,
null,
null);
setter.visitCode();
setter.visitTypeInsn(Opcodes.NEW, ClassFileUtils.binaryName(implClassName));
setter.visitInsn(Opcodes.DUP);
// push constructor arguments in property (map) order
properties.forEach((n, c) -> {
if (n.equals(propName)) {
// the single method argument lives in local slot 1
setter.visitVarInsn(propKind.loadOpcode, 1);
} else {
setter.visitVarInsn(Opcodes.ALOAD, 0);
setter.visitFieldInsn(Opcodes.GETFIELD,
ClassFileUtils.binaryName(implClassName),
n,
Kind.of(c).descriptor(c));
}
});
setter.visitMethodInsn(Opcodes.INVOKESPECIAL,
ClassFileUtils.binaryName(implClassName),
"<init>",
constructorDescriptor(properties.values()),
false);
setter.visitInsn(Opcodes.ARETURN);
setter.visitMaxs(0, 0);
setter.visitEnd();
}
// Uppercases the first character ("name" -> "Name"); property names are method names,
// so they are never empty.
private String capitalise(String s) {
return s.substring(0, 1).toUpperCase() + s.substring(1);
}
// Builds the generic class signature "Lbase<Liface;>;Liface;" so the implementation
// extends StructImpl<Iface> and implements Iface.
private String signature(Class<?> baseType, Class<?> ifaceType) {
String baseBinaryName = ClassFileUtils.binaryName(baseType);
String ifaceDescriptor = Kind.OBJECT.descriptor(ifaceType);
return "L" + baseBinaryName + "<" + ifaceDescriptor + ">" + ";" + ifaceDescriptor;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.net;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import java.util.function.Consumer;
import com.google.common.annotations.VisibleForTesting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.EventLoop;
import org.apache.cassandra.concurrent.ExecutorLocals;
import org.apache.cassandra.concurrent.Stage;
import org.apache.cassandra.exceptions.IncompatibleSchemaException;
import org.apache.cassandra.io.util.DataInputBuffer;
import org.apache.cassandra.locator.InetAddressAndPort;
import org.apache.cassandra.net.Message.Header;
import org.apache.cassandra.net.FrameDecoder.Frame;
import org.apache.cassandra.net.FrameDecoder.FrameProcessor;
import org.apache.cassandra.net.FrameDecoder.IntactFrame;
import org.apache.cassandra.net.FrameDecoder.CorruptFrame;
import org.apache.cassandra.net.ResourceLimits.Limit;
import org.apache.cassandra.tracing.TraceState;
import org.apache.cassandra.tracing.Tracing;
import org.apache.cassandra.utils.JVMStabilityInspector;
import org.apache.cassandra.utils.NoSpamLogger;
import static java.lang.Math.max;
import static java.lang.Math.min;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static org.apache.cassandra.net.Crc.*;
import static org.apache.cassandra.utils.MonotonicClock.approxTime;
/**
* Core logic for handling inbound message deserialization and execution (in tandem with {@link FrameDecoder}).
*
* Handles small and large messages, corruption, flow control, dispatch of message processing onto an appropriate
* thread pool.
*
* # Interaction with {@link FrameDecoder}
*
* {@link InboundMessageHandler} sits on top of a {@link FrameDecoder} in the Netty pipeline, and is tightly
* coupled with it.
*
* {@link FrameDecoder} decodes inbound frames and relies on a supplied {@link FrameProcessor} to act on them.
* {@link InboundMessageHandler} provides two implementations of that interface:
* - {@link #process(Frame)} is the default, primary processor, and the primary entry point to this class
* - {@link UpToOneMessageFrameProcessor}, supplied to the decoder when the handler is reactivated after being
* put in waiting mode due to lack of acquirable reserve memory capacity permits
*
* Return value of {@link FrameProcessor#process(Frame)} determines whether the decoder should keep processing
* frames (if {@code true} is returned) or stop until explicitly reactivated (if {@code false} is). To reactivate
* the decoder (once notified of available resource permits), {@link FrameDecoder#reactivate()} is invoked.
*
* # Frames
*
* {@link InboundMessageHandler} operates on frames of messages, and there are several kinds of them:
* 1. {@link IntactFrame} that are contained. As names suggest, these contain one or multiple fully contained
* messages believed to be uncorrupted. Guaranteed not to contain any part of an incomplete message.
* See {@link #processFrameOfContainedMessages(ShareableBytes, Limit, Limit)}.
* 2. {@link IntactFrame} that are NOT contained. These are uncorrupted parts of a large message split over multiple
* parts due to their size. Can represent first or subsequent frame of a large message.
* See {@link #processFirstFrameOfLargeMessage(IntactFrame, Limit, Limit)} and
* {@link #processSubsequentFrameOfLargeMessage(Frame)}.
* 3. {@link CorruptFrame} with corrupt header. These are unrecoverable, and force a connection to be dropped.
* 4. {@link CorruptFrame} with a valid header, but corrupt payload. These can be either contained or uncontained.
* - contained frames with corrupt payload can be gracefully dropped without dropping the connection
* - uncontained frames with corrupt payload can be gracefully dropped unless they represent the first
* frame of a new large message, as in that case we don't know how many bytes to skip
* See {@link #processCorruptFrame(CorruptFrame)}.
*
* Fundamental frame invariants:
* 1. A contained frame can only have fully-encapsulated messages - 1 to n, that don't cross frame boundaries
* 2. An uncontained frame can hold a part of one message only. It can NOT, say, contain end of one large message
* and a beginning of another one. All the bytes in an uncontained frame always belong to a single message.
*
* # Small vs large messages
*
* A single handler is equipped to process both small and large messages, potentially interleaved, but the logic
* differs depending on size. Small messages are deserialized in place, and then handed off to an appropriate
* thread pool for processing. Large messages accumulate frames until completion of a message, then hand off
* the untouched frames to the correct thread pool for the verb to be deserialized there and immediately processed.
*
* See {@link LargeMessage} for details of the large-message accumulating state-machine, and {@link ProcessMessage}
* and its inheritors for the differences in execution.
*
* # Flow control (backpressure)
*
* To prevent nodes from overwhelming and bringing each other to their knees with more inbound messages than
* can be processed in a timely manner, {@link InboundMessageHandler} implements a strict flow control policy.
*
* Before we attempt to process a message fully, we first infer its size from the stream. Then we attempt to
* acquire memory permits for a message of that size. If we succeed, then we move on actually process the message.
* If we fail, the frame decoder deactivates until sufficient permits are released for the message to be processed
* and the handler is activated again. Permits are released back once the message has been fully processed -
* after the verb handler has been invoked - on the {@link Stage} for the {@link Verb} of the message.
*
* Every connection has an exclusive number of permits allocated to it (by default 4MiB). In addition to it,
* there is a per-endpoint reserve capacity and a global reserve capacity {@link Limit}, shared between all
* connections from the same host and all connections, respectively. So long as the handler stays within
* its exclusive limit, it doesn't need to tap into reserve capacity.
*
* If tapping into reserve capacity is necessary, but the handler fails to acquire capacity from either
* endpoint or global reserve (and it needs to acquire from both), the handler and its frame decoder become
* inactive and register with a {@link WaitQueue} of the appropriate type, depending on which of the reserves
* couldn't be tapped into. Once enough messages have finished processing and had their permits released back
* to the reserves, {@link WaitQueue} will reactivate the sleeping handlers and they'll resume processing frames.
*
* The reason we 'split' reserve capacity into two limits - endpoint and global - is to guarantee liveness, and
* prevent single endpoint's connections from taking over the whole reserve, starving other connections.
*
* One permit per byte of serialized message gets acquired. When inflated on-heap, each message will occupy more
* than that, necessarily, but despite wide variance, it's a good enough proxy that correlates with on-heap footprint.
*/
public class InboundMessageHandler extends ChannelInboundHandlerAdapter implements FrameProcessor
{
private static final Logger logger = LoggerFactory.getLogger(InboundMessageHandler.class);
private static final NoSpamLogger noSpamLogger = NoSpamLogger.getLogger(logger, 1L, TimeUnit.SECONDS);
private static final Message.Serializer serializer = Message.serializer;
private final FrameDecoder decoder;
private final ConnectionType type;
private final Channel channel;
private final InetAddressAndPort self;
private final InetAddressAndPort peer;
private final int version;
private final int largeThreshold;
private LargeMessage largeMessage;
private final long queueCapacity;
volatile long queueSize = 0L;
private static final AtomicLongFieldUpdater<InboundMessageHandler> queueSizeUpdater =
AtomicLongFieldUpdater.newUpdater(InboundMessageHandler.class, "queueSize");
private final Limit endpointReserveCapacity;
private final WaitQueue endpointWaitQueue;
private final Limit globalReserveCapacity;
private final WaitQueue globalWaitQueue;
private final OnHandlerClosed onClosed;
private final InboundMessageCallbacks callbacks;
private final Consumer<Message<?>> consumer;
// wait queue handle, non-null if we overrun endpoint or global capacity and request to be resumed once it's released
private WaitQueue.Ticket ticket = null;
long corruptFramesRecovered, corruptFramesUnrecovered;
long receivedCount, receivedBytes;
long throttledCount, throttledNanos;
private boolean isClosed;
/*
 * Package-private constructor; wiring is done by the inbound connection setup code.
 * Pure field assignment — no side effects; the handler only becomes active once
 * handlerAdded() activates the frame decoder.
 */
InboundMessageHandler(FrameDecoder decoder,
ConnectionType type,
Channel channel,
InetAddressAndPort self,
InetAddressAndPort peer,
int version,
int largeThreshold,
long queueCapacity,
Limit endpointReserveCapacity,
Limit globalReserveCapacity,
WaitQueue endpointWaitQueue,
WaitQueue globalWaitQueue,
OnHandlerClosed onClosed,
InboundMessageCallbacks callbacks,
Consumer<Message<?>> consumer)
{
// connection identity
this.decoder = decoder;
this.type = type;
this.channel = channel;
this.self = self;
this.peer = peer;
this.version = version;
// size threshold for on- vs off-event-loop processing, and flow-control limits
this.largeThreshold = largeThreshold;
this.queueCapacity = queueCapacity;
this.endpointReserveCapacity = endpointReserveCapacity;
this.endpointWaitQueue = endpointWaitQueue;
this.globalReserveCapacity = globalReserveCapacity;
this.globalWaitQueue = globalWaitQueue;
// lifecycle and message-delivery callbacks
this.onClosed = onClosed;
this.callbacks = callbacks;
this.consumer = consumer;
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg)
{
    /*
     * Flow control and work stashing are implemented in tandem with FrameDecoder:
     * frames reach this handler via the FrameProcessor interface (process(Frame) is
     * the primary entry point), never via the regular pipeline. Landing here means
     * the pipeline is wired incorrectly.
     */
    throw new IllegalStateException("InboundMessageHandler doesn't expect channelRead() to be invoked");
}
@Override
public void handlerAdded(ChannelHandlerContext ctx)
{
    // the frame decoder starts out inactive; the handler activates it once it's in the pipeline
    decoder.activate(this);
}
@Override
public boolean process(Frame frame) throws IOException
{
    // corrupt frames are either skipped gracefully or abort the connection via InvalidCrc
    if (!(frame instanceof IntactFrame))
    {
        processCorruptFrame((CorruptFrame) frame);
        return true;
    }
    return processIntactFrame((IntactFrame) frame, endpointReserveCapacity, globalReserveCapacity);
}
private boolean processIntactFrame(IntactFrame frame, Limit endpointReserve, Limit globalReserve) throws IOException
{
    // self-contained frames carry only whole messages; other frames are slices of one large message
    if (frame.isSelfContained)
        return processFrameOfContainedMessages(frame.contents, endpointReserve, globalReserve);

    // no large message currently being accumulated: this must be the first slice of one
    if (largeMessage == null)
        return processFirstFrameOfLargeMessage(frame, endpointReserve, globalReserve);

    return processSubsequentFrameOfLargeMessage(frame);
}
/*
 * Process a frame whose messages are all fully contained (none crossing the frame
 * boundary) - both small and large ones, for the inbound definition of large
 * (breaching the size threshold for on-event-loop vs. off-event-loop processing).
 */
private boolean processFrameOfContainedMessages(ShareableBytes bytes, Limit endpointReserve, Limit globalReserve) throws IOException
{
    boolean canContinue = true;
    // stop early if a message can't acquire capacity; the decoder will deactivate
    while (canContinue && bytes.hasRemaining())
        canContinue = processOneContainedMessage(bytes, endpointReserve, globalReserve);
    return canContinue;
}
/*
 * Deserializes and dispatches one message fully contained in the current frame.
 * Returns false iff memory permits could not be acquired; in that case no bytes
 * are consumed and the frame decoder must deactivate until capacity is released.
 */
private boolean processOneContainedMessage(ShareableBytes bytes, Limit endpointReserve, Limit globalReserve) throws IOException
{
ByteBuffer buf = bytes.get();
long currentTimeNanos = approxTime.now();
Header header = serializer.extractHeader(buf, peer, currentTimeNanos, version);
long timeElapsed = currentTimeNanos - header.createdAtNanos;
int size = serializer.inferMessageSize(buf, buf.position(), buf.limit(), version);
// message expired on arrival: account for it and skip its bytes without acquiring permits
if (approxTime.isAfter(currentTimeNanos, header.expiresAtNanos))
{
callbacks.onHeaderArrived(size, header, timeElapsed, NANOSECONDS);
callbacks.onArrivedExpired(size, header, false, timeElapsed, NANOSECONDS);
receivedCount++;
receivedBytes += size;
bytes.skipBytes(size);
return true;
}
// could not reserve permits: bail out before consuming anything, so this message
// is re-attempted once the handler is reactivated
if (!acquireCapacity(endpointReserve, globalReserve, size, currentTimeNanos, header.expiresAtNanos))
return false;
callbacks.onHeaderArrived(size, header, timeElapsed, NANOSECONDS);
callbacks.onArrived(size, header, timeElapsed, NANOSECONDS);
receivedCount++;
receivedBytes += size;
// small messages are deserialized in place; large ones are handed off untouched
if (size <= largeThreshold)
processSmallMessage(bytes, size, header);
else
processLargeMessage(bytes, size, header);
return true;
}
/*
 * Deserializes a small message in place on the event loop and, on success, dispatches
 * it for processing. On deserialization failure the acquired permits are released and
 * the buffer is advanced past the message so the rest of the frame can still be decoded.
 */
private void processSmallMessage(ShareableBytes bytes, int size, Header header)
{
ByteBuffer buf = bytes.get();
final int begin = buf.position();
final int end = buf.limit();
buf.limit(begin + size); // cap to expected message size
Message<?> message = null;
try (DataInputBuffer in = new DataInputBuffer(buf, false))
{
Message<?> m = serializer.deserialize(in, header, version);
if (in.available() > 0) // bytes remaining after deser: deserializer is busted
throw new InvalidSerializedSizeException(header.verb, size, size - in.available());
// only assign once fully validated, so the finally block can tell success from failure
message = m;
}
catch (IncompatibleSchemaException e)
{
// expected during schema disagreement (e.g. rolling upgrades); logged without stack spam
callbacks.onFailedDeserialize(size, header, e);
noSpamLogger.info("{} incompatible schema encountered while deserializing a message", id(), e);
}
catch (Throwable t)
{
JVMStabilityInspector.inspectThrowable(t, false);
callbacks.onFailedDeserialize(size, header, t);
logger.error("{} unexpected exception caught while deserializing a message", id(), t);
}
finally
{
// permits were acquired for this message; give them back if it never got dispatched
if (null == message)
releaseCapacity(size);
// no matter what, set position to the beginning of the next message and restore limit, so that
// we can always keep on decoding the frame even on failure to deserialize previous message
buf.position(begin + size);
buf.limit(end);
}
if (null != message)
dispatch(new ProcessSmallMessage(message, size));
}
// for various reasons, a large message can still arrive fully contained in a single frame;
// hand its bytes off to be deserialized and processed off the event loop
private void processLargeMessage(ShareableBytes bytes, int size, Header header)
{
    ShareableBytes payload = bytes.sliceAndConsume(size).share();
    new LargeMessage(size, header, payload).schedule();
}
/*
 * Handling of multi-frame large messages
 */
/*
 * Starts accumulating a large message from its first (uncontained) frame. Returns false
 * iff memory permits for the whole message could not be acquired, in which case the frame
 * decoder deactivates and this frame is re-processed later. Note that an already-expired
 * message skips capacity acquisition entirely: its frames will be accumulated only to be
 * skipped (see the 'expired' flag passed to LargeMessage).
 */
private boolean processFirstFrameOfLargeMessage(IntactFrame frame, Limit endpointReserve, Limit globalReserve) throws IOException
{
ShareableBytes bytes = frame.contents;
ByteBuffer buf = bytes.get();
long currentTimeNanos = approxTime.now();
Header header = serializer.extractHeader(buf, peer, currentTimeNanos, version);
// size of the entire large message, spanning this and subsequent frames
int size = serializer.inferMessageSize(buf, buf.position(), buf.limit(), version);
boolean expired = approxTime.isAfter(currentTimeNanos, header.expiresAtNanos);
if (!expired && !acquireCapacity(endpointReserve, globalReserve, size, currentTimeNanos, header.expiresAtNanos))
return false;
callbacks.onHeaderArrived(size, header, currentTimeNanos - header.createdAtNanos, NANOSECONDS);
// only this frame's bytes are counted here; later frames are counted as they arrive
receivedBytes += buf.remaining();
largeMessage = new LargeMessage(size, header, expired);
largeMessage.supply(frame);
return true;
}
// Feeds a follow-up frame into the in-flight large message; clears the in-flight
// reference once the final frame has been supplied.
private boolean processSubsequentFrameOfLargeMessage(Frame frame)
{
    receivedBytes += frame.frameSize;
    boolean lastFrame = largeMessage.supply(frame);
    if (lastFrame)
    {
        receivedCount++;
        largeMessage = null;
    }
    return true;
}
/*
* We can handle some corrupt frames gracefully without dropping the connection and losing all the
* queued up messages, but not others.
*
* Corrupt frames that *ARE NOT* safe to skip gracefully and require the connection to be dropped:
* - any frame with corrupt header (!frame.isRecoverable())
* - first corrupt-payload frame of a large message (impossible to infer message size, and without it
* impossible to skip the message safely
*
* Corrupt frames that *ARE* safe to skip gracefully, without reconnecting:
* - any self-contained frame with a corrupt payload (but not header): we lose all the messages in the
* frame, but that has no effect on subsequent ones
* - any non-first payload-corrupt frame of a large message: we know the size of the large message in
* flight, so we just skip frames until we've seen all its bytes; we only lose the large message
*/
// Handles a corrupt frame, recovering where possible (see the preceding comment for which
// corruption cases are recoverable). Throws InvalidCrc for unrecoverable corruption,
// which terminates the connection.
private void processCorruptFrame(CorruptFrame frame) throws InvalidCrc
{
    if (!frame.isRecoverable()) // corrupt header: nothing about the frame can be trusted
    {
        corruptFramesUnrecovered++;
        throw new InvalidCrc(frame.readCRC, frame.computedCRC);
    }
    else if (frame.isSelfContained) // all messages in this frame are lost, but later frames are independent
    {
        receivedBytes += frame.frameSize;
        corruptFramesRecovered++;
        noSpamLogger.warn("{} invalid, recoverable CRC mismatch detected while reading messages (corrupted self-contained frame)", id());
    }
    else if (null == largeMessage) // first frame of a large message: size unknown, cannot skip safely
    {
        receivedBytes += frame.frameSize;
        corruptFramesUnrecovered++;
        noSpamLogger.error("{} invalid, unrecoverable CRC mismatch detected while reading messages (corrupted first frame of a large message)", id());
        throw new InvalidCrc(frame.readCRC, frame.computedCRC);
    }
    else // subsequent frame of a large message: size is known, so we skip it and lose only this message
    {
        processSubsequentFrameOfLargeMessage(frame);
        corruptFramesRecovered++;
        noSpamLogger.warn("{} invalid, recoverable CRC mismatch detected while reading a large message", id());
    }
}
// Invoked (on the event loop) by the endpoint WaitQueue once endpoint reserve permits
// have been pre-allocated for this handler; the global reserve stays at its normal limit.
private void onEndpointReserveCapacityRegained(Limit endpointReserve, long elapsedNanos)
{
    onReserveCapacityRegained(endpointReserve, globalReserveCapacity, elapsedNanos);
}
// Invoked (on the event loop) by the global WaitQueue once global reserve permits
// have been pre-allocated for this handler; the endpoint reserve stays at its normal limit.
private void onGlobalReserveCapacityRegained(Limit globalReserve, long elapsedNanos)
{
    onReserveCapacityRegained(endpointReserveCapacity, globalReserve, elapsedNanos);
}
// Common reactivation path, always run on this handler's event loop with one of the two
// reserves overridden by a pre-allocated Limit (whichever wait queue woke us up).
private void onReserveCapacityRegained(Limit endpointReserve, Limit globalReserve, long elapsedNanos)
{
    if (isClosed)
        return;
    assert channel.eventLoop().inEventLoop();
    ticket = null; // no longer parked in a wait queue
    throttledNanos += elapsedNanos;
    try
    {
        /*
         * Process up to one message using supplied overridden reserves - one of them pre-allocated,
         * and guaranteed to be enough for one message - then, if no obstacles encountered, reactivate
         * the frame decoder using normal reserve capacities.
         */
        if (processUpToOneMessage(endpointReserve, globalReserve))
            decoder.reactivate();
    }
    catch (Throwable t)
    {
        exceptionCaught(t);
    }
}
// Drains at most one message from the decoder's backlog using the supplied reserves.
// Returns true if the handler should be reactivated - i.e. no new hurdles were encountered,
// like running out of the other kind of reserve capacity.
private boolean processUpToOneMessage(Limit endpointReserve, Limit globalReserve) throws IOException
{
    UpToOneMessageFrameProcessor singleMessageProcessor = new UpToOneMessageFrameProcessor(endpointReserve, globalReserve);
    decoder.processBacklog(singleMessageProcessor);
    return singleMessageProcessor.isActive;
}
/*
* Process at most one message. Won't always be an entire one (if the message in the head of line
* is a large one, and there aren't sufficient frames to decode it entirely), but will never be more than one.
*/
private class UpToOneMessageFrameProcessor implements FrameProcessor
{
    private final Limit endpointReserve;
    private final Limit globalReserve;

    boolean isActive = true;   // cleared when we hit a capacity (or coprocessor) obstacle
    boolean firstFrame = true; // the first backlog frame determines small vs. large message handling

    private UpToOneMessageFrameProcessor(Limit endpointReserve, Limit globalReserve)
    {
        this.endpointReserve = endpointReserve;
        this.globalReserve = globalReserve;
    }

    @Override
    public boolean process(Frame frame) throws IOException
    {
        if (firstFrame)
        {
            if (!(frame instanceof IntactFrame))
                throw new IllegalStateException("First backlog frame must be intact");
            firstFrame = false;
            return processFirstFrame((IntactFrame) frame);
        }
        return processSubsequentFrame(frame);
    }

    // first frame: either a full, self-contained message, or the head of a large one
    private boolean processFirstFrame(IntactFrame frame) throws IOException
    {
        if (frame.isSelfContained)
        {
            isActive = processOneContainedMessage(frame.contents, endpointReserve, globalReserve);
            return false; // stop after one message
        }
        else
        {
            isActive = processFirstFrameOfLargeMessage(frame, endpointReserve, globalReserve);
            return isActive; // continue unless fallen behind coprocessor or ran out of reserve capacity again
        }
    }

    // subsequent frames all belong to the in-flight large message (or are corrupt)
    private boolean processSubsequentFrame(Frame frame) throws IOException
    {
        if (frame instanceof IntactFrame)
            processSubsequentFrameOfLargeMessage(frame);
        else
            processCorruptFrame((CorruptFrame) frame);
        return largeMessage != null; // continue until done with the large message
    }
}
/**
* Try to acquire permits for the inbound message. In case of failure, register with the right wait queue to be
* reactivated once permit capacity is regained.
*/
/**
 * Try to acquire permits for the inbound message; on failure, park this handler in the
 * wait queue matching the exhausted reserve (endpoint or global) so it can be reactivated
 * once capacity is regained.
 */
@SuppressWarnings("BooleanMethodIsAlwaysInverted")
private boolean acquireCapacity(Limit endpointReserve, Limit globalReserve, int bytes, long currentTimeNanos, long expiresAtNanos)
{
    ResourceLimits.Outcome outcome = acquireCapacity(endpointReserve, globalReserve, bytes);
    switch (outcome)
    {
        case INSUFFICIENT_ENDPOINT:
            ticket = endpointWaitQueue.register(this, bytes, currentTimeNanos, expiresAtNanos);
            break;
        case INSUFFICIENT_GLOBAL:
            ticket = globalWaitQueue.register(this, bytes, currentTimeNanos, expiresAtNanos);
            break;
    }
    if (outcome != ResourceLimits.Outcome.SUCCESS)
        throttledCount++;
    return outcome == ResourceLimits.Outcome.SUCCESS;
}
// Attempts to account 'bytes' against the per-connection queue, borrowing any overflow from
// the endpoint and global reserves. Returns SUCCESS, or which reserve was insufficient.
private ResourceLimits.Outcome acquireCapacity(Limit endpointReserve, Limit globalReserve, int bytes)
{
    long currentQueueSize = queueSize;
    /*
     * acquireCapacity() is only ever called on the event loop, and as such queueSize is only ever increased
     * on the event loop. If there is enough capacity, we can safely addAndGet() and immediately return.
     */
    if (currentQueueSize + bytes <= queueCapacity)
    {
        queueSizeUpdater.addAndGet(this, bytes);
        return ResourceLimits.Outcome.SUCCESS;
    }
    // we know we don't have enough local queue capacity for the entire message, so we need to borrow some from reserve capacity
    long allocatedExcess = min(currentQueueSize + bytes - queueCapacity, bytes);
    if (!globalReserve.tryAllocate(allocatedExcess))
        return ResourceLimits.Outcome.INSUFFICIENT_GLOBAL;
    if (!endpointReserve.tryAllocate(allocatedExcess))
    {
        // undo the successful global allocation before reporting failure
        globalReserve.release(allocatedExcess);
        globalWaitQueue.signal();
        return ResourceLimits.Outcome.INSUFFICIENT_ENDPOINT;
    }
    long newQueueSize = queueSizeUpdater.addAndGet(this, bytes);
    long actualExcess = max(0, min(newQueueSize - queueCapacity, bytes));
    /*
     * It's possible that some permits were released at some point after we loaded current queueSize,
     * and we can satisfy more of the permits using our exclusive per-connection capacity, needing
     * less than previously estimated from the reserves. If that's the case, release the now unneeded
     * permit excess back to endpoint/global reserves.
     */
    if (actualExcess != allocatedExcess) // actualExcess < allocatedExcess
    {
        long excess = allocatedExcess - actualExcess;
        endpointReserve.release(excess);
        globalReserve.release(excess);
        endpointWaitQueue.signal();
        globalWaitQueue.signal();
    }
    return ResourceLimits.Outcome.SUCCESS;
}
// Returns 'bytes' of queue capacity; if part of it had been borrowed from the reserves,
// gives that part back and signals both wait queues so parked handlers can make progress.
private void releaseCapacity(int bytes)
{
    long sizeBeforeRelease = queueSizeUpdater.getAndAdd(this, -bytes);
    if (sizeBeforeRelease <= queueCapacity)
        return; // nothing was borrowed from the reserves for these bytes

    long borrowed = min(sizeBeforeRelease - queueCapacity, bytes);
    endpointReserveCapacity.release(borrowed);
    globalReserveCapacity.release(borrowed);
    endpointWaitQueue.signal();
    globalWaitQueue.signal();
}
/**
* Invoked to release capacity for a message that has been fully, successfully processed.
*
* Normally no different from invoking {@link #releaseCapacity(int)}, but is necessary for the verifier
* to be able to delay capacity release for backpressure testing.
*/
@VisibleForTesting
protected void releaseProcessedCapacity(int size, Header header)
{
    // default behaviour releases immediately; test verifiers override this to delay
    // release for backpressure testing (see the javadoc above)
    releaseCapacity(size);
}
// Netty entry point for pipeline errors; delegates to exceptionCaught(Throwable) and makes
// sure that nothing escapes back into the pipeline.
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause)
{
    try
    {
        exceptionCaught(cause);
    }
    catch (Throwable t)
    {
        logger.error("Unexpected exception in {}.exceptionCaught", this.getClass().getSimpleName(), t);
    }
}
// Terminal error path: stop decoding, log (distinguishing the known corrupt-stream case),
// and close the channel. Any queued-up frames are discarded.
private void exceptionCaught(Throwable cause)
{
    decoder.discard();
    JVMStabilityInspector.inspectThrowable(cause, false);
    if (cause instanceof Message.InvalidLegacyProtocolMagic)
        logger.error("{} invalid, unrecoverable CRC mismatch detected while reading messages - closing the connection", id());
    else
        logger.error("{} unexpected exception caught while processing inbound messages; terminating connection", id(), cause);
    channel.close();
}
// Channel teardown: abort any partially-accumulated large message, invalidate our wait-queue
// ticket (if parked), and notify the owner via the onClosed callback.
@Override
public void channelInactive(ChannelHandlerContext ctx)
{
    isClosed = true;
    if (null != largeMessage)
        largeMessage.abort();
    if (null != ticket)
        ticket.invalidate();
    onClosed.call(this);
}
// The event loop this handler's channel is bound to; all frame processing happens on it.
private EventLoop eventLoop()
{
    return channel.eventLoop();
}
// Connection id for logging; when includeReal is set, also embeds the channel's actual
// local/remote socket addresses in addition to the logical peer/self addresses.
String id(boolean includeReal)
{
    return includeReal
         ? SocketFactory.channelId(peer, (InetSocketAddress) channel.remoteAddress(),
                                   self, (InetSocketAddress) channel.localAddress(),
                                   type, channel.id().asShortText())
         : id();
}
// Short connection id (logical peer/self addresses only) used as a logging prefix.
String id()
{
    return SocketFactory.channelId(peer, self, type, channel.id().asShortText());
}
/*
* A large-message frame-accumulating state machine.
*
* Collects intact frames until it's has all the bytes necessary to deserialize the large message,
* at which point it schedules a task on the appropriate {@link Stage},
* a task that deserializes the message and immediately invokes the verb handler.
*
* Also handles corrupt frames and potential expiry of the large message during accumulation:
* if it's taking the frames too long to arrive, there is no point in holding on to the
* accumulated frames, or in gathering more - so we release the ones we already have, and
* skip any remaining ones, alongside with returning memory permits early.
*/
private class LargeMessage
{
    private final int size;   // total serialized size of the message, across all frames
    private final Header header;

    private final List<ShareableBytes> buffers = new ArrayList<>(); // retained frame payloads
    private int received;     // bytes seen so far, counting both retained and skipped frames

    private boolean isExpired;
    private boolean isCorrupt;

    private LargeMessage(int size, Header header, boolean isExpired)
    {
        this.size = size;
        this.header = header;
        this.isExpired = isExpired;
    }

    // constructor for a large message fully contained in a single frame
    private LargeMessage(int size, Header header, ShareableBytes bytes)
    {
        this(size, header, false);
        buffers.add(bytes);
    }

    // hand the complete message off to its Stage for deserialization + processing
    private void schedule()
    {
        dispatch(new ProcessLargeMessage(this));
    }

    /**
     * Return true if this was the last frame of the large message.
     */
    private boolean supply(Frame frame)
    {
        if (frame instanceof IntactFrame)
            onIntactFrame((IntactFrame) frame);
        else
            onCorruptFrame();

        received += frame.frameSize;
        if (size == received)
            onComplete();
        return size == received;
    }

    private void onIntactFrame(IntactFrame frame)
    {
        boolean expires = approxTime.isAfter(header.expiresAtNanos);
        if (!isExpired && !isCorrupt)
        {
            if (!expires)
            {
                // normal path: retain the payload for later deserialization
                buffers.add(frame.contents.sliceAndConsume(frame.frameSize).share());
                return;
            }
            releaseBuffersAndCapacity(); // release resources once we transition from normal state to expired
        }
        frame.consume(); // skip the payload without retaining it
        isExpired |= expires;
    }

    private void onCorruptFrame()
    {
        if (!isExpired && !isCorrupt)
            releaseBuffersAndCapacity(); // release resources once we transition from normal state to corrupt
        isCorrupt = true;
        isExpired |= approxTime.isAfter(header.expiresAtNanos);
    }

    // every byte has arrived: either schedule processing, or report why the message was dropped
    private void onComplete()
    {
        long timeElapsed = approxTime.now() - header.createdAtNanos;

        if (!isExpired && !isCorrupt)
        {
            callbacks.onArrived(size, header, timeElapsed, NANOSECONDS);
            schedule();
        }
        else if (isExpired)
        {
            callbacks.onArrivedExpired(size, header, isCorrupt, timeElapsed, NANOSECONDS);
        }
        else
        {
            callbacks.onArrivedCorrupt(size, header, timeElapsed, NANOSECONDS);
        }
    }

    // invoked if the channel closes while this message is still accumulating
    private void abort()
    {
        if (!isExpired && !isCorrupt)
            releaseBuffersAndCapacity(); // release resources if in normal state when abort() is invoked
        callbacks.onClosedBeforeArrival(size, header, received, isCorrupt, isExpired);
    }

    private void releaseBuffers()
    {
        buffers.forEach(ShareableBytes::release); buffers.clear();
    }

    private void releaseBuffersAndCapacity()
    {
        releaseBuffers(); releaseCapacity(size);
    }

    // Deserializes the accumulated buffers; returns null (after reporting) on failure.
    private Message deserialize()
    {
        try (ChunkedInputPlus input = ChunkedInputPlus.of(buffers))
        {
            Message<?> m = serializer.deserialize(input, header, version);
            int remainder = input.remainder();
            if (remainder > 0) // bytes remaining after deserialization: serializer is busted
                throw new InvalidSerializedSizeException(header.verb, size, size - remainder);
            return m;
        }
        catch (IncompatibleSchemaException e)
        {
            callbacks.onFailedDeserialize(size, header, e);
            noSpamLogger.info("{} incompatible schema encountered while deserializing a message", id(), e);
        }
        catch (Throwable t)
        {
            JVMStabilityInspector.inspectThrowable(t, false);
            callbacks.onFailedDeserialize(size, header, t);
            logger.error("{} unexpected exception caught while deserializing a message", id(), t);
        }
        finally
        {
            buffers.clear(); // closing the input will have ensured that the buffers were released no matter what
        }
        return null;
    }
}
/**
* Submit a {@link ProcessMessage} task to the appropriate {@link Stage} for the {@link Verb}.
*/
// Routes the task to the Stage executor for its verb, propagating any tracing state
// carried by the message header.
private void dispatch(ProcessMessage task)
{
    Header header = task.header();

    TraceState traceState = Tracing.instance.initializeFromMessage(header);
    if (null != traceState)
        traceState.trace("{} message received from {}", header.verb, header.from);

    callbacks.onDispatched(task.size(), header);
    header.verb.stage.execute(task, ExecutorLocals.create(traceState));
}
private abstract class ProcessMessage implements Runnable
{
    /**
     * Actually handle the message. Runs on the appropriate {@link Stage} for the {@link Verb}.
     *
     * Small messages will come pre-deserialized. Large messages will be deserialized on the stage,
     * just in time, and only then processed.
     */
    public void run()
    {
        Header header = header();
        long currentTimeNanos = approxTime.now();
        boolean expired = approxTime.isAfter(currentTimeNanos, header.expiresAtNanos);

        boolean processed = false;
        try
        {
            callbacks.onExecuting(size(), header, currentTimeNanos - header.createdAtNanos, NANOSECONDS);

            if (expired)
            {
                callbacks.onExpired(size(), header, currentTimeNanos - header.createdAtNanos, NANOSECONDS);
                return;
            }

            Message message = provideMessage();
            if (null != message)
            {
                consumer.accept(message);
                processed = true;
                callbacks.onProcessed(size(), header);
            }
        }
        finally
        {
            // capacity release goes through a distinct path for successfully processed messages,
            // so that a test verifier can delay it (see releaseProcessedCapacity())
            if (processed)
                releaseProcessedCapacity(size(), header);
            else
                releaseCapacity(size());

            releaseResources();

            callbacks.onExecuted(size(), header, approxTime.now() - currentTimeNanos, NANOSECONDS);
        }
    }

    abstract int size();
    abstract Header header();
    abstract Message provideMessage(); // may return null on deserialization failure
    void releaseResources() {}         // hook for subclasses that hold additional resources
}
private class ProcessSmallMessage extends ProcessMessage
{
private final int size;
private final Message message;
ProcessSmallMessage(Message message, int size)
{
this.size = size;
this.message = message;
}
int size()
{
return size;
}
Header header()
{
return message.header;
}
Message provideMessage()
{
return message;
}
}
/** {@link ProcessMessage} for a large message, deserialized lazily on the stage. */
private class ProcessLargeMessage extends ProcessMessage
{
    private final LargeMessage message;

    ProcessLargeMessage(LargeMessage message)
    {
        this.message = message;
    }

    @Override
    int size()
    {
        return message.size;
    }

    @Override
    Header header()
    {
        return message.header;
    }

    @Override
    Message provideMessage()
    {
        return message.deserialize(); // just-in-time deserialization; null on failure
    }

    @Override
    void releaseResources()
    {
        message.releaseBuffers(); // releases buffers if they haven't been yet (by deserialize() call)
    }
}
/**
* A special-purpose wait queue to park inbound message handlers that failed to allocate
* reserve capacity for a message in. Upon such failure a handler registers itself with
* a {@link WaitQueue} of the appropriate kind (either ENDPOINT or GLOBAL - if failed
* to allocate endpoint or global reserve capacity, respectively), stops processing any
* accumulated frames or receiving new ones, and waits - until reactivated.
*
* Every time permits are returned to an endpoint or global {@link Limit}, the respective
* queue gets signalled, and if there are any handlers registered in it, we will attempt
* to reactivate as many waiting handlers as current available reserve capacity allows
* us to - immediately, on the {@link #signal()}-calling thread. At most one such attempt
* will be in progress at any given time.
*
* Handlers that can be reactivated will be grouped by their {@link EventLoop} and a single
* {@link ReactivateHandlers} task will be scheduled per event loop, on the corresponding
* event loops.
*
* When run, the {@link ReactivateHandlers} task will ask each handler in its group to first
* process one message - using preallocated reserve capacity - and if no obstacles were met -
* reactivate the handlers, this time using their regular reserves.
*
* See {@link WaitQueue#schedule()}, {@link ReactivateHandlers#run()}, {@link Ticket#reactivateHandler(Limit)}.
*/
public static final class WaitQueue
{
    enum Kind { ENDPOINT, GLOBAL }

    // states of the 'scheduled' guard: at most one schedule() pass runs at a time,
    // with RUN_AGAIN recording that a signal arrived while a pass was in flight
    private static final int NOT_RUNNING = 0;
    @SuppressWarnings("unused")
    private static final int RUNNING = 1;
    private static final int RUN_AGAIN = 2;

    private volatile int scheduled;
    private static final AtomicIntegerFieldUpdater<WaitQueue> scheduledUpdater =
        AtomicIntegerFieldUpdater.newUpdater(WaitQueue.class, "scheduled");

    private final Kind kind;
    private final Limit reserveCapacity;

    private final ManyToOneConcurrentLinkedQueue<Ticket> queue = new ManyToOneConcurrentLinkedQueue<>();

    private WaitQueue(Kind kind, Limit reserveCapacity)
    {
        this.kind = kind;
        this.reserveCapacity = reserveCapacity;
    }

    public static WaitQueue endpoint(Limit endpointReserveCapacity)
    {
        return new WaitQueue(Kind.ENDPOINT, endpointReserveCapacity);
    }

    public static WaitQueue global(Limit globalReserveCapacity)
    {
        return new WaitQueue(Kind.GLOBAL, globalReserveCapacity);
    }

    // Parks a handler that failed to allocate 'bytesRequested' from this queue's reserve.
    private Ticket register(InboundMessageHandler handler, int bytesRequested, long registeredAtNanos, long expiresAtNanos)
    {
        Ticket ticket = new Ticket(this, handler, bytesRequested, registeredAtNanos, expiresAtNanos);
        Ticket previous = queue.relaxedPeekLastAndOffer(ticket);
        if (null == previous || !previous.isWaiting())
            signal(); // only signal the queue if this handler is first to register
        return ticket;
    }

    private void signal()
    {
        if (queue.relaxedIsEmpty())
            return; // we can return early if no handlers have registered with the wait queue

        // ensure only one thread runs schedule() at a time; concurrent signals fold into RUN_AGAIN
        if (NOT_RUNNING == scheduledUpdater.getAndUpdate(this, i -> min(RUN_AGAIN, i + 1)))
        {
            do
            {
                schedule();
            }
            while (RUN_AGAIN == scheduledUpdater.getAndDecrement(this));
        }
    }

    // Reactivates as many waiting handlers as current reserve capacity allows, grouping them
    // by event loop and scheduling a single ReactivateHandlers task per loop.
    private void schedule()
    {
        Map<EventLoop, ReactivateHandlers> tasks = null;

        long currentTimeNanos = approxTime.now();

        Ticket t;
        while ((t = queue.peek()) != null)
        {
            if (!t.call()) // invalidated
            {
                queue.remove();
                continue;
            }

            boolean isLive = t.isLive(currentTimeNanos);
            if (isLive && !reserveCapacity.tryAllocate(t.bytesRequested))
            {
                if (!t.reset()) // the ticket was invalidated after being called but before now
                {
                    queue.remove();
                    continue;
                }
                break; // TODO: traverse the entire queue to unblock handlers that have expired or invalidated tickets
            }

            if (null == tasks)
                tasks = new IdentityHashMap<>();

            queue.remove();
            tasks.computeIfAbsent(t.handler.eventLoop(), e -> new ReactivateHandlers()).add(t, isLive);
        }

        if (null != tasks)
            tasks.forEach(EventLoop::execute);
    }

    private class ReactivateHandlers implements Runnable
    {
        List<Ticket> tickets = new ArrayList<>();
        long capacity = 0L;

        private void add(Ticket ticket, boolean isLive)
        {
            tickets.add(ticket);
            if (isLive) capacity += ticket.bytesRequested; // expired tickets don't need permits
        }

        @Override
        public void run()
        {
            Limit limit = new ResourceLimits.Basic(capacity);
            try
            {
                for (Ticket ticket : tickets)
                    ticket.reactivateHandler(limit);
            }
            finally
            {
                /*
                 * Free up any unused capacity, if any. Will be non-zero if one or more handlers were closed
                 * when we attempted to run their callback, or used more of their other reserve; or if the first
                 * message in the unprocessed stream has expired in the narrow time window.
                 */
                long remaining = limit.remaining();
                if (remaining > 0)
                {
                    reserveCapacity.release(remaining);
                    signal();
                }
            }
        }
    }

    private static final class Ticket
    {
        private static final int WAITING = 0;
        private static final int CALLED = 1;
        private static final int INVALIDATED = 2; // invalidated by a handler that got closed

        private volatile int state;
        private static final AtomicIntegerFieldUpdater<Ticket> stateUpdater =
            AtomicIntegerFieldUpdater.newUpdater(Ticket.class, "state");

        private final WaitQueue waitQueue;
        private final InboundMessageHandler handler;
        private final int bytesRequested;
        private final long registeredAtNanos; // renamed from misspelled 'reigsteredAtNanos'
        private final long expiresAtNanos;

        private Ticket(WaitQueue waitQueue, InboundMessageHandler handler, int bytesRequested, long registeredAtNanos, long expiresAtNanos)
        {
            this.waitQueue = waitQueue;
            this.handler = handler;
            this.bytesRequested = bytesRequested;
            this.registeredAtNanos = registeredAtNanos;
            this.expiresAtNanos = expiresAtNanos;
        }

        // Runs the handler's reactivation callback on its event loop, with the pre-allocated Limit.
        private void reactivateHandler(Limit capacity)
        {
            long elapsedNanos = approxTime.now() - registeredAtNanos;
            try
            {
                if (waitQueue.kind == Kind.ENDPOINT)
                    handler.onEndpointReserveCapacityRegained(capacity, elapsedNanos);
                else
                    handler.onGlobalReserveCapacityRegained(capacity, elapsedNanos);
            }
            catch (Throwable t)
            {
                logger.error("{} exception caught while reactivating a handler", handler.id(), t);
            }
        }

        private boolean isWaiting()
        {
            return state == WAITING;
        }

        private boolean isLive(long currentTimeNanos)
        {
            return !approxTime.isAfter(currentTimeNanos, expiresAtNanos);
        }

        private void invalidate()
        {
            state = INVALIDATED;
            waitQueue.signal();
        }

        private boolean call()
        {
            return stateUpdater.compareAndSet(this, WAITING, CALLED);
        }

        private boolean reset()
        {
            return stateUpdater.compareAndSet(this, CALLED, WAITING);
        }
    }
}
/** Callback invoked from channelInactive() when a handler's channel goes inactive. */
public interface OnHandlerClosed
{
    void call(InboundMessageHandler handler);
}
}
| |
/**
* Copyright 2017 The GreyCat Authors. All rights reserved.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package greycat.utility.json;
import greycat.*;
import greycat.internal.custom.KDTree;
import greycat.internal.custom.NDTree;
import greycat.plugin.NodeState;
import greycat.plugin.NodeStateCallback;
import greycat.struct.*;
import org.json.JSONArray;
import org.json.JSONObject;
import java.util.*;
/**
* @ignore ts
*/
public class JsonBuilder {

    /**
     * Serializes a single typed GreyCat value as a {@code [typeId,payload]} JSON fragment,
     * where the payload encoding depends on the type: raw scalar, bracketed array, map
     * rendered as a JSON object, or a nested structure's own toJson()/toString() output.
     *
     * @param type the {@code Type} constant describing {@code elem}
     * @param elem the value to serialize; must be of the runtime type implied by {@code type}
     *             (it is cast unchecked)
     * @return the JSON fragment; an unknown type id yields {@code "[]"} with the payload omitted
     */
    public static String buildJson(int type, Object elem) {
        final boolean[] isFirst = {true}; // comma bookkeeping, shared with map callbacks
        final StringBuilder builder = new StringBuilder();
        builder.append("[");
        switch (type) {
            case Type.BOOL:
                builder.append(Type.BOOL);
                builder.append(",");
                if ((Boolean) elem) {
                    builder.append("true");
                } else {
                    builder.append("false");
                }
                break;
            case Type.STRING:
                builder.append(Type.STRING);
                builder.append(",");
                builder.append("\"");
                // NOTE(review): the value is not JSON-escaped; embedded quotes or backslashes
                // will produce invalid JSON - confirm against the matching parser before changing
                builder.append((String) elem);
                builder.append("\"");
                break;
            case Type.LONG:
                builder.append(Type.LONG);
                builder.append(","); // fixed: was ", " - inconsistent with every other scalar case
                builder.append((Long) elem);
                break;
            case Type.INT:
                builder.append(Type.INT);
                builder.append(",");
                builder.append((Integer) elem);
                break;
            case Type.DOUBLE:
                builder.append(Type.DOUBLE);
                builder.append(",");
                builder.append((Double) elem);
                break;
            case Type.DOUBLE_ARRAY:
                builder.append(Type.DOUBLE_ARRAY);
                builder.append(",");
                builder.append("[");
                DoubleArray castedArr = ((DoubleArray) elem);
                for (int j = 0; j < castedArr.size(); j++) {
                    if (j != 0) {
                        builder.append(",");
                    }
                    builder.append(castedArr.get(j));
                }
                builder.append("]");
                break;
            case Type.LONG_ARRAY:
                builder.append(Type.LONG_ARRAY);
                builder.append(",");
                builder.append("[");
                LongArray castedArr2 = (LongArray) elem;
                for (int j = 0; j < castedArr2.size(); j++) {
                    if (j != 0) {
                        builder.append(",");
                    }
                    builder.append(castedArr2.get(j));
                }
                builder.append("]");
                break;
            case Type.INT_ARRAY:
                builder.append(Type.INT_ARRAY);
                builder.append(",");
                builder.append("[");
                IntArray castedArr3 = (IntArray) elem;
                for (int j = 0; j < castedArr3.size(); j++) {
                    if (j != 0) {
                        builder.append(",");
                    }
                    builder.append(castedArr3.get(j));
                }
                builder.append("]");
                break;
            case Type.STRING_ARRAY:
                builder.append(Type.STRING_ARRAY);
                builder.append(",");
                builder.append("[");
                StringArray castedStrArr = (StringArray) elem;
                for (int j = 0; j < castedStrArr.size(); j++) {
                    if (j != 0) {
                        builder.append(",");
                    }
                    builder.append("\"");
                    builder.append(castedStrArr.get(j)); // NOTE(review): also not JSON-escaped
                    builder.append("\"");
                }
                builder.append("]");
                break;
            case Type.LONG_TO_LONG_MAP:
                builder.append(Type.LONG_TO_LONG_MAP);
                builder.append(",");
                builder.append("{");
                LongLongMap castedMapL2L = (LongLongMap) elem;
                isFirst[0] = true;
                castedMapL2L.each(new LongLongMapCallBack() {
                    @Override
                    public void on(long key, long value) {
                        if (!isFirst[0]) {
                            builder.append(",");
                        } else {
                            isFirst[0] = false;
                        }
                        builder.append("\"");
                        builder.append(key);
                        builder.append("\":");
                        builder.append(value);
                    }
                });
                builder.append("}");
                break;
            // TODO: verify this serialization (was: "@TODO A VERIFIER")
            case Type.LONG_TO_LONG_ARRAY_MAP:
                builder.append(Type.LONG_TO_LONG_ARRAY_MAP);
                builder.append(",");
                builder.append("{");
                LongLongArrayMap castedMapL2LA = (LongLongArrayMap) elem;
                isFirst[0] = true;
                // collect distinct keys first: each() visits one (key, value) pair per value
                Set<Long> keys = new HashSet<Long>();
                castedMapL2LA.each(new LongLongArrayMapCallBack() {
                    @Override
                    public void on(long key, long value) {
                        keys.add(key);
                    }
                });
                final Long[] flatKeys = keys.toArray(new Long[keys.size()]);
                for (int i = 0; i < flatKeys.length; i++) {
                    long[] values = castedMapL2LA.get(flatKeys[i]);
                    if (!isFirst[0]) {
                        builder.append(",");
                    } else {
                        isFirst[0] = false;
                    }
                    builder.append("\"");
                    builder.append(flatKeys[i]);
                    builder.append("\":[");
                    for (int j = 0; j < values.length; j++) {
                        if (j != 0) {
                            builder.append(",");
                        }
                        builder.append(values[j]);
                    }
                    builder.append("]");
                }
                builder.append("}");
                break;
            case Type.STRING_TO_INT_MAP:
                builder.append(Type.STRING_TO_INT_MAP);
                builder.append(",");
                builder.append("{");
                StringIntMap castedMapS2L = (StringIntMap) elem;
                isFirst[0] = true;
                castedMapS2L.each(new StringLongMapCallBack() {
                    @Override
                    public void on(String key, long value) {
                        if (!isFirst[0]) {
                            builder.append(",");
                        } else {
                            isFirst[0] = false;
                        }
                        builder.append("\"");
                        builder.append(key);
                        builder.append("\":");
                        builder.append(value);
                    }
                });
                builder.append("}");
                break;
            case Type.RELATION:
                builder.append(Type.RELATION);
                builder.append(",");
                builder.append("[");
                Relation castedRelArr = (Relation) elem;
                for (int j = 0; j < castedRelArr.size(); j++) {
                    if (j != 0) {
                        builder.append(",");
                    }
                    builder.append(castedRelArr.get(j));
                }
                builder.append("]");
                break;
            case Type.DMATRIX:
                // payload layout: [rows, columns, cell(0,0), cell(0,1), ... row-major]
                builder.append(Type.DMATRIX);
                builder.append(",");
                builder.append("[");
                DMatrix castedDMat = (DMatrix) elem;
                builder.append(castedDMat.rows());
                builder.append(",");
                builder.append(castedDMat.columns());
                builder.append(",");
                for (int i = 0; i < castedDMat.rows(); i++) {
                    for (int j = 0; j < castedDMat.columns(); j++) {
                        if (j != 0 || i != 0) {
                            builder.append(",");
                        }
                        builder.append(castedDMat.get(i, j));
                    }
                }
                builder.append("]");
                break;
            case Type.LMATRIX:
                // same [rows, columns, cells...] layout as DMATRIX, with long cells
                builder.append(Type.LMATRIX);
                builder.append(",");
                builder.append("[");
                LMatrix castedLMat = (LMatrix) elem;
                builder.append(castedLMat.rows());
                builder.append(",");
                builder.append(castedLMat.columns());
                builder.append(",");
                for (int i = 0; i < castedLMat.rows(); i++) {
                    for (int j = 0; j < castedLMat.columns(); j++) {
                        if (j != 0 || i != 0) {
                            builder.append(",");
                        }
                        builder.append(castedLMat.get(i, j));
                    }
                }
                builder.append("]");
                break;
            case Type.ESTRUCT:
                builder.append(Type.ESTRUCT);
                builder.append(",");
                EStruct castedEStruct = (EStruct) elem;
                builder.append(castedEStruct.toJson());
                break;
            case Type.ESTRUCT_ARRAY:
                builder.append(Type.ESTRUCT_ARRAY);
                builder.append(",");
                EStructArray castedEArr = (EStructArray) elem;
                builder.append(castedEArr.toJson());
                break;
            case Type.ERELATION:
                builder.append(Type.ERELATION);
                builder.append(","); // fixed: was ", " - inconsistent with the other cases
                ERelation castedErel = (ERelation) elem;
                // TODO: switch from toString to toJson (value is OK)
                builder.append(castedErel.toString());
                break;
            case Type.TASK:
                builder.append(Type.TASK);
                builder.append(",");
                builder.append("\"");
                Task castedTask = (Task) elem;
                // TODO: switch from toString to toJson (value OK)
                builder.append(castedTask.toString());
                builder.append("\"");
                break;
            case Type.TASK_ARRAY:
                builder.append(Type.TASK_ARRAY);
                builder.append(",");
                builder.append("[");
                Task[] castedTaskArr = (Task[]) elem;
                for (int i = 0; i < castedTaskArr.length; i++) {
                    if (i != 0) {
                        builder.append(",");
                    }
                    builder.append("\"");
                    // TODO: switch from toString to toJson
                    builder.append(castedTaskArr[i].toString());
                    builder.append("\"");
                }
                builder.append("]");
                break;
            case Type.NODE:
                // renders the node's resolved state as an object of attributeName -> buildJson(value)
                builder.append(Type.NODE);
                builder.append(",");
                Node castedNode = (Node) elem;
                final NodeState state = castedNode.graph().resolver().resolveState(castedNode);
                isFirst[0] = true;
                builder.append("{");
                if (state != null) {
                    state.each(new NodeStateCallback() {
                        @Override
                        public void on(int attributeKey, int elemType, Object elem) {
                            if (elem != null) {
                                if (isFirst[0]) {
                                    isFirst[0] = false;
                                } else {
                                    builder.append(",");
                                }
                                String resolveName = castedNode.graph().resolver().hashToString(attributeKey);
                                if (resolveName == null) {
                                    resolveName = attributeKey + ""; // fall back to the raw hash key
                                }
                                builder.append("\"");
                                builder.append(resolveName);
                                builder.append("\":");
                                builder.append(JsonBuilder.buildJson(elemType, elem));
                            }
                        }
                    });
                }
                builder.append("}");
                break;
            case Type.INT_TO_INT_MAP:
                builder.append(Type.INT_TO_INT_MAP);
                builder.append(",");
                builder.append("{");
                IntIntMap castedMapI2I = (IntIntMap) elem;
                isFirst[0] = true;
                castedMapI2I.each(new IntIntMapCallBack() {
                    @Override
                    public void on(int key, int value) {
                        if (!isFirst[0]) {
                            builder.append(",");
                        } else {
                            isFirst[0] = false;
                        }
                        builder.append("\"");
                        builder.append(key);
                        builder.append("\":");
                        builder.append(value);
                    }
                });
                builder.append("}");
                break;
            case Type.INT_TO_STRING_MAP:
                builder.append(Type.INT_TO_STRING_MAP);
                builder.append(",");
                builder.append("{");
                IntStringMap castedMapI2S = (IntStringMap) elem;
                isFirst[0] = true;
                castedMapI2S.each(new IntStringMapCallBack() {
                    @Override
                    public void on(int key, String value) {
                        if (!isFirst[0]) {
                            builder.append(",");
                        } else {
                            isFirst[0] = false;
                        }
                        builder.append("\"");
                        builder.append(key);
                        builder.append("\":");
                        builder.append("\"");
                        builder.append(value); // NOTE(review): also not JSON-escaped
                        builder.append("\"");
                    }
                });
                builder.append("}");
                break;
            case Type.INDEX:
                // an index is backed by either an EStructArray or a Node; sniff via cast
                try {
                    EStructArray castedIndex = (EStructArray) elem;
                    builder.append(Type.INDEX);
                    builder.append(",");
                    builder.append(buildJson(Type.ESTRUCT_ARRAY, castedIndex));
                } catch (ClassCastException e) {
                    Node castedNodeIndex = (Node) elem;
                    builder.append(Type.INDEX);
                    builder.append(",");
                    builder.append(buildJson(Type.NODE, castedNodeIndex));
                }
                break;
            case Type.KDTREE:
                KDTree castedKTree = (KDTree) elem;
                builder.append(Type.KDTREE);
                // NOTE(review): this wraps the "[...]" backend fragment in braces, producing "{[...]}"
                // which is not strictly valid JSON - confirm against the matching parser
                builder.append(",{");
                builder.append(buildJson(Type.ESTRUCT_ARRAY, castedKTree.backend()));
                builder.append("}");
                break;
            case Type.NDTREE:
                NDTree castedNTree = (NDTree) elem;
                builder.append(Type.NDTREE);
                builder.append(",{"); // see the KDTREE note above
                builder.append(buildJson(Type.ESTRUCT_ARRAY, castedNTree.backend()));
                builder.append("}");
                break;
        }
        builder.append("]");
        return builder.toString();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.spatial4j.core.io.jts;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import com.spatial4j.core.context.jts.DatelineRule;
import com.spatial4j.core.context.jts.JtsSpatialContext;
import com.spatial4j.core.context.jts.JtsSpatialContextFactory;
import com.spatial4j.core.io.WKTReader;
import com.spatial4j.core.shape.Point;
import com.spatial4j.core.shape.Rectangle;
import com.spatial4j.core.shape.Shape;
import com.spatial4j.core.shape.jts.JtsGeometry;
import com.vividsolutions.jts.algorithm.CGAlgorithms;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LinearRing;
import com.vividsolutions.jts.geom.Polygon;
/**
 * Extends {@link com.spatial4j.core.io.WKTReader} adding support for polygons, using JTS.
 */
public class JtsWKTReader extends WKTReader {

  protected final JtsSpatialContext ctx;

  /** How (and whether) parsed geometries are validated and repaired. */
  protected final ValidationRule validationRule;
  /** Whether parsed JtsGeometry shapes are automatically indexed after reading. */
  protected final boolean autoIndex;

  /**
   * @param ctx the JTS-backed spatial context used to build shapes; never null
   * @param factory supplies the validation rule and auto-index flag
   */
  public JtsWKTReader(JtsSpatialContext ctx, JtsSpatialContextFactory factory) {
    super(ctx, factory);
    this.ctx = ctx;
    this.validationRule = factory.validationRule;
    this.autoIndex = factory.autoIndex;
  }

  /** @see JtsWKTReader.ValidationRule */
  public ValidationRule getValidationRule() {
    return validationRule;
  }

  /**
   * JtsGeometry shapes are automatically validated when {@link #getValidationRule()} isn't
   * {@code none}.
   */
  public boolean isAutoValidate() {
    return validationRule != ValidationRule.none;
  }

  /**
   * If JtsGeometry shapes should be automatically prepared (i.e. optimized) when read via WKT.
   *
   * @see com.spatial4j.core.shape.jts.JtsGeometry#index()
   */
  public boolean isAutoIndex() {
    return autoIndex;
  }

  /** @see DatelineRule */
  public DatelineRule getDatelineRule() {
    return ctx.getDatelineRule();
  }

  /**
   * Dispatches POLYGON and MULTIPOLYGON to the JTS-aware parsers; everything else is delegated
   * to the base reader.
   */
  @Override
  protected Shape parseShapeByType(WKTReader.State state, String shapeType) throws ParseException {
    if (shapeType.equalsIgnoreCase("POLYGON")) {
      return parsePolygonShape(state);
    } else if (shapeType.equalsIgnoreCase("MULTIPOLYGON")) {
      return parseMulitPolygonShape(state);
    }
    return super.parseShapeByType(state, shapeType);
  }

  /**
   * Bypasses {@link JtsSpatialContext#makeLineString(java.util.List)} so that we can more
   * efficiently get the LineString without creating a {@code List<Point>}.
   */
  @Override
  protected Shape parseLineStringShape(WKTReader.State state) throws ParseException {
    if (!ctx.useJtsLineString())
      return super.parseLineStringShape(state);

    if (state.nextIfEmptyAndSkipZM())
      return ctx.makeLineString(Collections.<Point>emptyList());

    GeometryFactory geometryFactory = ctx.getGeometryFactory();

    Coordinate[] coordinates = coordinateSequence(state);
    return makeShapeFromGeometry(geometryFactory.createLineString(coordinates));
  }

  /**
   * Parses a POLYGON shape from the raw string. It might return a
   * {@link com.spatial4j.core.shape.Rectangle} if the polygon is one.
   *
   * <pre>
   * coordinateSequenceList
   * </pre>
   *
   * @return the parsed polygon shape, or a Rectangle when the ring is axis-aligned rectangular
   * @throws ParseException if the WKT text is malformed at the current position
   */
  protected Shape parsePolygonShape(WKTReader.State state) throws ParseException {
    Geometry geometry;
    if (state.nextIfEmptyAndSkipZM()) {
      // "POLYGON EMPTY": build an empty polygon from an empty ring.
      GeometryFactory geometryFactory = ctx.getGeometryFactory();
      geometry =
          geometryFactory
              .createPolygon(geometryFactory.createLinearRing(new Coordinate[] {}), null);
    } else {
      geometry = polygon(state);
      if (geometry.isRectangle()) {
        // TODO although, might want to never convert if there's a semantic difference (e.g.
        // geodetically)
        return makeRectFromPoly(geometry);
      }
    }
    return makeShapeFromGeometry(geometry);
  }

  /**
   * Converts a rectangular JTS polygon into a context Rectangle, honoring the dateline rule for
   * geodetic contexts (a rectangle wider than half the world, or wound clockwise under
   * {@code ccwRect}, is treated as crossing the dateline).
   */
  protected Rectangle makeRectFromPoly(Geometry geometry) {
    assert geometry.isRectangle();
    Envelope env = geometry.getEnvelopeInternal();
    boolean crossesDateline = false;
    if (ctx.isGeo() && getDatelineRule() != DatelineRule.none) {
      if (getDatelineRule() == DatelineRule.ccwRect) {
        // If JTS says it is clockwise, then it's actually a dateline crossing rectangle.
        crossesDateline = !CGAlgorithms.isCCW(geometry.getCoordinates());
      } else {
        crossesDateline = env.getWidth() > 180;
      }
    }
    if (crossesDateline)
      // swap X bounds so the rectangle wraps the other way around the globe
      return ctx.makeRectangle(env.getMaxX(), env.getMinX(), env.getMinY(), env.getMaxY());
    else
      return ctx.makeRectangle(env.getMinX(), env.getMaxX(), env.getMinY(), env.getMaxY());
  }

  /**
   * Reads a polygon, returning a JTS polygon. The first coordinate sequence is the shell; any
   * further sequences are holes.
   */
  protected Polygon polygon(WKTReader.State state) throws ParseException {
    GeometryFactory geometryFactory = ctx.getGeometryFactory();

    List<Coordinate[]> coordinateSequenceList = coordinateSequenceList(state);

    LinearRing shell = geometryFactory.createLinearRing(coordinateSequenceList.get(0));

    LinearRing[] holes = null;
    if (coordinateSequenceList.size() > 1) {
      holes = new LinearRing[coordinateSequenceList.size() - 1];
      for (int i = 1; i < coordinateSequenceList.size(); i++) {
        holes[i - 1] = geometryFactory.createLinearRing(coordinateSequenceList.get(i));
      }
    }
    return geometryFactory.createPolygon(shell, holes);
  }

  /**
   * Parses a MULTIPOLYGON shape from the raw string.
   * (Note: the method name's "Mulit" typo is kept because the method is protected API and
   * subclasses may override it.)
   *
   * <pre>
   * '(' polygon (',' polygon )* ')'
   * </pre>
   */
  protected Shape parseMulitPolygonShape(WKTReader.State state) throws ParseException {
    if (state.nextIfEmptyAndSkipZM())
      // typed empty list instead of the raw Collections.EMPTY_LIST (avoids unchecked warning)
      return ctx.makeCollection(Collections.<Shape>emptyList());

    List<Shape> polygons = new ArrayList<Shape>();
    state.nextExpect('(');
    do {
      polygons.add(parsePolygonShape(state));
    } while (state.nextIf(','));
    state.nextExpect(')');

    return ctx.makeCollection(polygons);
  }

  /**
   * Reads a list of JTS Coordinate sequences from the current position.
   *
   * <pre>
   * '(' coordinateSequence (',' coordinateSequence )* ')'
   * </pre>
   */
  protected List<Coordinate[]> coordinateSequenceList(WKTReader.State state) throws ParseException {
    List<Coordinate[]> sequenceList = new ArrayList<Coordinate[]>();
    state.nextExpect('(');
    do {
      sequenceList.add(coordinateSequence(state));
    } while (state.nextIf(','));
    state.nextExpect(')');
    return sequenceList;
  }

  /**
   * Reads a JTS Coordinate sequence from the current position.
   *
   * <pre>
   * '(' coordinate (',' coordinate )* ')'
   * </pre>
   */
  protected Coordinate[] coordinateSequence(WKTReader.State state) throws ParseException {
    List<Coordinate> sequence = new ArrayList<Coordinate>();
    state.nextExpect('(');
    do {
      sequence.add(coordinate(state));
    } while (state.nextIf(','));
    state.nextExpect(')');
    return sequence.toArray(new Coordinate[sequence.size()]);
  }

  /**
   * Reads a {@link com.vividsolutions.jts.geom.Coordinate} from the current position. It's akin to
   * {@link #point(com.spatial4j.core.io.WKTReader.State)} but for a JTS Coordinate. Only the first
   * 2 numbers are parsed; any remaining are ignored.
   */
  protected Coordinate coordinate(WKTReader.State state) throws ParseException {
    double x = ctx.normX(state.nextDouble());
    ctx.verifyX(x);
    double y = ctx.normY(state.nextDouble());
    ctx.verifyY(y);
    state.skipNextDoubles();
    return new Coordinate(x, y);
  }

  /** Snaps a distance to the context's precision model. */
  @Override
  protected double normDist(double v) {
    return ctx.getGeometryFactory().getPrecisionModel().makePrecise(v);
  }

  /** Creates the JtsGeometry, potentially validating, repairing, and preparing. */
  protected JtsGeometry makeShapeFromGeometry(Geometry geometry) {
    JtsGeometry jtsGeom;
    try {
      jtsGeom = ctx.makeShape(geometry);
      if (isAutoValidate())
        jtsGeom.validate();
    } catch (RuntimeException e) {
      // repair invalid geometry according to the configured rule:
      if (validationRule == ValidationRule.repairConvexHull) {
        jtsGeom = ctx.makeShape(geometry.convexHull());
      } else if (validationRule == ValidationRule.repairBuffer0) {
        jtsGeom = ctx.makeShape(geometry.buffer(0));
      } else {
        // TODO there are other smarter things we could do like repairing inner holes and
        // subtracting
        // from outer repaired shell; but we needn't try too hard.
        throw e;
      }
    }
    if (isAutoIndex())
      jtsGeom.index();
    return jtsGeom;
  }

  /**
   * Indicates how JTS geometries (notably polygons but applies to other geometries too) are
   * validated (if at all) and repaired (if at all).
   */
  public enum ValidationRule {
    /**
     * Geometries will not be validated (because it's kinda expensive to calculate). You may or may
     * not ultimately get an error at some point; results are undefined. However, note that
     * coordinates will still be validated for falling within the world boundaries.
     *
     * @see com.vividsolutions.jts.geom.Geometry#isValid().
     */
    none,

    /**
     * Geometries will be explicitly validated on creation, possibly resulting in an exception:
     * {@link com.spatial4j.core.exception.InvalidShapeException}.
     */
    error,

    /**
     * Invalid Geometries are repaired by taking the convex hull. The result will very likely be a
     * larger shape that matches false-positives, but no false-negatives. See
     * {@link com.vividsolutions.jts.geom.Geometry#convexHull()}.
     */
    repairConvexHull,

    /**
     * Invalid polygons are repaired using the {@code buffer(0)} technique. From the <a
     * href="http://tsusiatsoftware.net/jts/jts-faq/jts-faq.html">JTS FAQ</a>:
     * <p>
     * The buffer operation is fairly insensitive to topological invalidity, and the act of
     * computing the buffer can often resolve minor issues such as self-intersecting rings. However,
     * in some situations the computed result may not be what is desired (i.e. the buffer operation
     * may be "confused" by certain topologies, and fail to produce a result which is close to the
     * original. An example where this can happen is a "bow-tie: or "figure-8" polygon, with one
     * very small lobe and one large one. Depending on the orientations of the lobes, the buffer(0)
     * operation may keep the small lobe and discard the "valid" large lobe).
     * </p>
     */
    repairBuffer0
  }

}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.notebook;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.apache.zeppelin.conf.ZeppelinConfiguration;
import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars;
import org.apache.zeppelin.display.AngularObject;
import org.apache.zeppelin.display.AngularObjectRegistry;
import org.apache.zeppelin.interpreter.InterpreterFactory;
import org.apache.zeppelin.interpreter.InterpreterGroup;
import org.apache.zeppelin.interpreter.InterpreterSetting;
import org.apache.zeppelin.interpreter.remote.RemoteAngularObjectRegistry;
import org.apache.zeppelin.notebook.repo.NotebookRepo;
import org.apache.zeppelin.notebook.repo.NotebookRepoSync;
import org.apache.zeppelin.scheduler.SchedulerFactory;
import org.apache.zeppelin.search.SearchService;
import org.quartz.CronScheduleBuilder;
import org.quartz.CronTrigger;
import org.quartz.JobBuilder;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.quartz.JobKey;
import org.quartz.SchedulerException;
import org.quartz.TriggerBuilder;
import org.quartz.impl.StdSchedulerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Collection of Notes.
 */
public class Notebook {
  Logger logger = LoggerFactory.getLogger(Notebook.class);

  @SuppressWarnings("unused") @Deprecated //TODO(bzz): remove unused
  private SchedulerFactory schedulerFactory;

  private InterpreterFactory replFactory;

  /** All loaded notes keyed by note id. Keep the order. */
  Map<String, Note> notes = new LinkedHashMap<String, Note>();

  private ZeppelinConfiguration conf;
  private StdSchedulerFactory quertzSchedFact;
  private org.quartz.Scheduler quartzSched;
  private JobListenerFactory jobListenerFactory;
  private NotebookRepo notebookRepo;
  /** Search index over notes; nullable (see constructor javadoc), so guard every use. */
  private SearchService notebookIndex;

  /**
   * Main constructor \w manual Dependency Injection
   *
   * @param conf
   * @param notebookRepo
   * @param schedulerFactory
   * @param replFactory
   * @param jobListenerFactory
   * @param notebookIndex - (nullable) for indexing all notebooks on creating.
   *
   * @throws IOException
   * @throws SchedulerException
   */
  public Notebook(ZeppelinConfiguration conf, NotebookRepo notebookRepo,
      SchedulerFactory schedulerFactory,
      InterpreterFactory replFactory, JobListenerFactory jobListenerFactory,
      SearchService notebookIndex) throws IOException, SchedulerException {
    this.conf = conf;
    this.notebookRepo = notebookRepo;
    this.schedulerFactory = schedulerFactory;
    this.replFactory = replFactory;
    this.jobListenerFactory = jobListenerFactory;
    this.notebookIndex = notebookIndex;
    quertzSchedFact = new org.quartz.impl.StdSchedulerFactory();
    quartzSched = quertzSchedFact.getScheduler();
    quartzSched.start();
    CronJob.notebook = this;

    loadAllNotes();
    if (this.notebookIndex != null) {
      long start = System.nanoTime();
      logger.info("Notebook indexing started...");
      notebookIndex.addIndexDocs(notes.values());
      // FIX: elapsed time is now - start (the original "start - now" was always negative)
      logger.info("Notebook indexing finished: {} indexed in {}s", notes.size(),
          TimeUnit.NANOSECONDS.toSeconds(System.nanoTime() - start));
    }
  }

  /**
   * Create new note, optionally pre-bound to the default interpreters.
   *
   * @return the newly created (and persisted) note
   * @throws IOException
   */
  public Note createNote() throws IOException {
    Note note;
    if (conf.getBoolean(ConfVars.ZEPPELIN_NOTEBOOK_AUTO_INTERPRETER_BINDING)) {
      note = createNote(replFactory.getDefaultInterpreterSettingList());
    } else {
      note = createNote(null);
    }
    // createNote(List) already indexes the note; the previous extra addIndexDoc here
    // indexed every new note twice.
    return note;
  }

  /**
   * Create new note bound to the given interpreter settings.
   *
   * @param interpreterIds interpreter setting ids to bind, or null for none
   * @return the newly created (and persisted) note
   * @throws IOException
   */
  public Note createNote(List<String> interpreterIds) throws IOException {
    NoteInterpreterLoader intpLoader = new NoteInterpreterLoader(replFactory);
    Note note = new Note(notebookRepo, intpLoader, jobListenerFactory, notebookIndex);
    intpLoader.setNoteId(note.id());
    synchronized (notes) {
      notes.put(note.id(), note);
    }
    if (interpreterIds != null) {
      bindInterpretersToNote(note.id(), interpreterIds);
    }

    if (notebookIndex != null) { // notebookIndex is nullable by contract
      notebookIndex.addIndexDoc(note);
    }
    note.persist();
    return note;
  }

  /**
   * Clone existing note.
   * @param sourceNoteID - the note ID to clone
   * @param newNoteName - the name of the new note
   * @return noteId
   * @throws IOException, CloneNotSupportedException, IllegalArgumentException
   */
  public Note cloneNote(String sourceNoteID, String newNoteName) throws
      IOException, CloneNotSupportedException, IllegalArgumentException {

    Note sourceNote = getNote(sourceNoteID);
    if (sourceNote == null) {
      throw new IllegalArgumentException(sourceNoteID + "not found");
    }
    Note newNote = createNote();
    if (newNoteName != null) {
      newNote.setName(newNoteName);
    }
    // Copy the interpreter bindings
    List<String> boundInterpreterSettingsIds = getBindedInterpreterSettingsIds(sourceNote.id());
    bindInterpretersToNote(newNote.id(), boundInterpreterSettingsIds);

    List<Paragraph> paragraphs = sourceNote.getParagraphs();
    for (Paragraph p : paragraphs) {
      newNote.addCloneParagraph(p);
    }

    if (notebookIndex != null) { // re-index after the cloned paragraphs were added
      notebookIndex.addIndexDoc(newNote);
    }
    newNote.persist();
    return newNote;
  }

  /** Binds the given interpreter settings to the note, if the note exists. */
  public void bindInterpretersToNote(String id,
      List<String> interpreterSettingIds) throws IOException {
    Note note = getNote(id);
    if (note != null) {
      note.getNoteReplLoader().setInterpreters(interpreterSettingIds);
      replFactory.putNoteInterpreterSettingBinding(id, interpreterSettingIds);
    }
  }

  /** @return ids of the interpreter settings bound to the note; empty list if note is unknown */
  public List<String> getBindedInterpreterSettingsIds(String id) {
    Note note = getNote(id);
    if (note != null) {
      return note.getNoteReplLoader().getInterpreters();
    } else {
      return new LinkedList<String>();
    }
  }

  /** @return interpreter settings bound to the note; empty list if note is unknown */
  public List<InterpreterSetting> getBindedInterpreterSettings(String id) {
    Note note = getNote(id);
    if (note != null) {
      return note.getNoteReplLoader().getInterpreterSettings();
    } else {
      return new LinkedList<InterpreterSetting>();
    }
  }

  /** @return the note with the given id, or null */
  public Note getNote(String id) {
    synchronized (notes) {
      return notes.get(id);
    }
  }

  /**
   * Removes the note and all of its derived state: search index entries, angular objects
   * in every interpreter registry, and the persisted copy.
   */
  public void removeNote(String id) {
    Note note;

    synchronized (notes) {
      note = notes.remove(id);
    }
    if (note == null) {
      // unknown id; nothing to clean up (previously this NPE'd below)
      return;
    }
    if (notebookIndex != null) {
      notebookIndex.deleteIndexDocs(note);
    }

    // remove from all interpreter instance's angular object registry
    for (InterpreterSetting settings : replFactory.get()) {
      AngularObjectRegistry registry = settings.getInterpreterGroup().getAngularObjectRegistry();
      if (registry instanceof RemoteAngularObjectRegistry) {
        ((RemoteAngularObjectRegistry) registry).removeAllAndNotifyRemoteProcess(id);
      } else {
        registry.removeAll(id);
      }
    }

    try {
      note.unpersist();
    } catch (IOException e) {
      // log instead of printStackTrace so the failure shows up in the server log
      logger.error("Failed to unpersist note " + id, e);
    }
  }

  /**
   * Loads a single note from the repository, manually injecting its dependencies and restoring
   * its angular objects into the matching interpreter registries.
   *
   * @return the loaded note, or null if it could not be read
   */
  @SuppressWarnings("rawtypes")
  private Note loadNoteFromRepo(String id) {
    Note note = null;
    try {
      note = notebookRepo.get(id);
    } catch (IOException e) {
      logger.error("Failed to load " + id, e);
    }
    if (note == null) {
      return null;
    }

    //Manually inject ALL dependencies, as DI constructor was NOT used
    note.setIndex(this.notebookIndex);

    NoteInterpreterLoader replLoader = new NoteInterpreterLoader(replFactory);
    note.setReplLoader(replLoader);
    replLoader.setNoteId(note.id());

    note.setJobListenerFactory(jobListenerFactory);
    note.setNotebookRepo(notebookRepo);

    Map<String, SnapshotAngularObject> angularObjectSnapshot = new HashMap<>();

    // restore angular object --------------
    Date lastUpdatedDate = new Date(0);
    for (Paragraph p : note.getParagraphs()) {
      p.setNote(note);
      if (p.getDateFinished() != null &&
          lastUpdatedDate.before(p.getDateFinished())) {
        lastUpdatedDate = p.getDateFinished();
      }
    }

    Map<String, List<AngularObject>> savedObjects = note.getAngularObjects();

    if (savedObjects != null) {
      for (String intpGroupName : savedObjects.keySet()) {
        List<AngularObject> objectList = savedObjects.get(intpGroupName);

        for (AngularObject object : objectList) {
          SnapshotAngularObject snapshot = angularObjectSnapshot.get(object.getName());
          // keep only the most recently updated snapshot per object name
          if (snapshot == null || snapshot.getLastUpdate().before(lastUpdatedDate)) {
            angularObjectSnapshot.put(object.getName(),
                new SnapshotAngularObject(intpGroupName, object, lastUpdatedDate));
          }
        }
      }
    }

    synchronized (notes) {
      notes.put(note.id(), note);
      refreshCron(note.id());
    }

    for (String name : angularObjectSnapshot.keySet()) {
      SnapshotAngularObject snapshot = angularObjectSnapshot.get(name);
      List<InterpreterSetting> settings = replFactory.get();
      for (InterpreterSetting setting : settings) {
        InterpreterGroup intpGroup = setting.getInterpreterGroup();
        if (intpGroup.getId().equals(snapshot.getIntpGroupId())) {
          AngularObjectRegistry registry = intpGroup.getAngularObjectRegistry();
          String noteId = snapshot.getAngularObject().getNoteId();
          // at this point, remote interpreter process is not created.
          // so does not make sense add it to the remote.
          //
          // therefore instead of addAndNotifyRemoteProcess(), need to use add()
          // that results add angularObject only in ZeppelinServer side not remoteProcessSide
          registry.add(name, snapshot.getAngularObject().get(), noteId);
        }
      }
    }
    return note;
  }

  /** Loads every note listed by the repository into memory. */
  private void loadAllNotes() throws IOException {
    List<NoteInfo> noteInfos = notebookRepo.list();

    for (NoteInfo info : noteInfos) {
      loadNoteFromRepo(info.getId());
    }
  }

  /**
   * Reload all notes from repository after clearing `notes`
   * to reflect the changes of added/deleted/modified notebooks on file system level.
   *
   * @return
   * @throws IOException
   */
  public void reloadAllNotes() throws IOException {
    synchronized (notes) {
      notes.clear();
    }

    if (notebookRepo instanceof NotebookRepoSync) {
      NotebookRepoSync mainRepo = (NotebookRepoSync) notebookRepo;
      if (mainRepo.getRepoCount() > 1) {
        mainRepo.sync();
      }
    }

    List<NoteInfo> noteInfos = notebookRepo.list();
    for (NoteInfo info : noteInfos) {
      loadNoteFromRepo(info.getId());
    }
  }

  /** Immutable record of an angular object and the interpreter group it was saved under. */
  @SuppressWarnings("rawtypes")
  class SnapshotAngularObject {
    String intpGroupId;
    AngularObject angularObject;
    Date lastUpdate;

    public SnapshotAngularObject(String intpGroupId,
        AngularObject angularObject, Date lastUpdate) {
      super();
      this.intpGroupId = intpGroupId;
      this.angularObject = angularObject;
      this.lastUpdate = lastUpdate;
    }

    public String getIntpGroupId() {
      return intpGroupId;
    }
    public AngularObject getAngularObject() {
      return angularObject;
    }
    public Date getLastUpdate() {
      return lastUpdate;
    }
  }

  /** @return a snapshot of all notes, sorted by name (falling back to id when unnamed) */
  public List<Note> getAllNotes() {
    synchronized (notes) {
      List<Note> noteList = new ArrayList<Note>(notes.values());
      Collections.sort(noteList, new Comparator<Note>() {
        @Override
        public int compare(Note note1, Note note2) {
          String name1 = note1.id();
          if (note1.getName() != null) {
            name1 = note1.getName();
          }
          String name2 = note2.id();
          if (note2.getName() != null) {
            name2 = note2.getName();
          }
          return name1.compareTo(name2);
        }
      });
      return noteList;
    }
  }

  public JobListenerFactory getJobListenerFactory() {
    return jobListenerFactory;
  }

  public void setJobListenerFactory(JobListenerFactory jobListenerFactory) {
    this.jobListenerFactory = jobListenerFactory;
  }

  /**
   * Cron task for the note.
   */
  public static class CronJob implements org.quartz.Job {
    public static Notebook notebook;

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {

      String noteId = context.getJobDetail().getJobDataMap().getString("noteId");
      Note note = notebook.getNote(noteId);
      if (note == null) {
        // the note was removed after the job was scheduled; nothing to run
        return;
      }
      note.runAll();

      while (!note.getLastParagraph().isTerminated()) {
        try {
          Thread.sleep(1000);
        } catch (InterruptedException e) {
          // restore the interrupt flag and stop waiting instead of busy-looping
          Thread.currentThread().interrupt();
          break;
        }
      }

      boolean releaseResource = false;
      // read defensively: the config value may be absent (null) or not a Boolean;
      // the original unchecked (boolean) cast NPE'd on null
      Object releaseResourceConfig = note.getConfig().get("releaseresource");
      if (releaseResourceConfig instanceof Boolean) {
        releaseResource = (Boolean) releaseResourceConfig;
      }
      if (releaseResource) {
        for (InterpreterSetting setting : note.getNoteReplLoader().getInterpreterSettings()) {
          notebook.getInterpreterFactory().restart(setting.id());
        }
      }
    }
  }

  /**
   * (Re)schedules the note's cron job from its "cron" config entry; removes any existing
   * schedule first. Scheduling errors are reported through the note's "cron" info entry.
   */
  public void refreshCron(String id) {
    removeCron(id);
    synchronized (notes) {

      Note note = notes.get(id);
      if (note == null) {
        return;
      }
      Map<String, Object> config = note.getConfig();
      if (config == null) {
        return;
      }

      String cronExpr = (String) note.getConfig().get("cron");
      if (cronExpr == null || cronExpr.trim().length() == 0) {
        return;
      }

      JobDetail newJob =
          JobBuilder.newJob(CronJob.class).withIdentity(id, "note").usingJobData("noteId", id)
              .build();

      Map<String, Object> info = note.getInfo();
      info.put("cron", null);
      CronTrigger trigger = null;
      try {
        trigger =
            TriggerBuilder.newTrigger().withIdentity("trigger_" + id, "note")
                .withSchedule(CronScheduleBuilder.cronSchedule(cronExpr)).forJob(id, "note")
                .build();
      } catch (Exception e) {
        logger.error("Error", e);
        info.put("cron", e.getMessage());
      }

      try {
        if (trigger != null) {
          quartzSched.scheduleJob(newJob, trigger);
        }
      } catch (SchedulerException e) {
        logger.error("Error", e);
        info.put("cron", "Scheduler Exception");
      }
    }
  }

  /** Deletes the quartz job for the note, logging (not throwing) on failure. */
  private void removeCron(String id) {
    try {
      quartzSched.deleteJob(new JobKey(id, "note"));
    } catch (SchedulerException e) {
      logger.error("Can't remove quertz " + id, e);
    }
  }

  public InterpreterFactory getInterpreterFactory() {
    return replFactory;
  }

  public ZeppelinConfiguration getConf() {
    return conf;
  }

  /** Closes the repository and, when present, the search index. */
  public void close() {
    this.notebookRepo.close();
    if (this.notebookIndex != null) { // nullable by contract
      this.notebookIndex.close();
    }
  }
}
| |
// Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection;
import com.intellij.codeInsight.PsiEquivalenceUtil;
import com.intellij.java.analysis.JavaAnalysisBundle;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.siyeh.ig.psiutils.CommentTracker;
import com.siyeh.ig.psiutils.ControlFlowUtils;
import com.siyeh.ig.psiutils.ExpressionUtils;
import com.siyeh.ig.psiutils.ParenthesesUtils;
import com.siyeh.ig.style.SimplifiableIfStatementInspection;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static com.intellij.util.ObjectUtils.tryCast;
public class UseCompareMethodInspection extends AbstractBaseJavaLocalInspectionTool {
  @NotNull
  @Override
  public PsiElementVisitor buildVisitor(@NotNull ProblemsHolder holder, boolean isOnTheFly) {
    // The fix rewrites to Xxx.compare(a, b); presumably those static compare methods require
    // at least JDK 1.4 -- TODO confirm which language level each boxed type's compare needs.
    if (!PsiUtil.getLanguageLevel(holder.getFile()).isAtLeast(LanguageLevel.JDK_1_4)) {
      return PsiElementVisitor.EMPTY_VISITOR;
    }
    return new JavaElementVisitor() {
      // Detects boxed.compareTo(other) calls replaceable with a primitive compare.
      @Override
      public void visitMethodCallExpression(PsiMethodCallExpression call) {
        CompareInfo info = fromCall(call);
        PsiElement nameElement = call.getMethodExpression().getReferenceNameElement();
        if (info != null && nameElement != null) {
          register(info, nameElement);
        }
      }

      // Detects three-way if/else-if chains returning/assigning -1, 0, 1.
      @Override
      public void visitIfStatement(PsiIfStatement statement) {
        CompareInfo info = fromIf(statement);
        PsiElement keyword = statement.getFirstChild();
        if (info != null && keyword != null) {
          register(info, keyword);
        }
      }

      // Detects nested ternary chains producing -1, 0, 1.
      @Override
      public void visitConditionalExpression(PsiConditionalExpression expression) {
        CompareInfo info = fromTernary(expression);
        if (info != null) {
          register(info, expression);
        }
      }

      // Reports the problem and attaches the quick fix built from the matched pattern.
      private void register(CompareInfo info, PsiElement nameElement) {
        holder.registerProblem(nameElement, JavaAnalysisBundle.message("inspection.can.be.replaced.with.message", info.myClass.getClassName() + ".compare"),
                               new ReplaceWithPrimitiveCompareFix(info.getReplacementText()));
      }
    };
  }
  /**
   * Tries to recognize a hand-rolled three-way comparison written as an if/else-if chain,
   * in either of two shapes:
   * <pre>
   *   if (a &lt; b) return -1; else if (a &gt; b) return 1; [else] return 0;
   *   if (a &lt; b) x = -1;    else if (a &gt; b) x = 1;    else x = 0;
   * </pre>
   * Returns a {@link CompareInfo} describing the equivalent {@code Xxx.compare(a, b)} call,
   * or null when the statement does not match.
   */
  private static CompareInfo fromIf(PsiIfStatement ifStatement) {
    PsiExpression firstCondition = ifStatement.getCondition();
    if (firstCondition == null) return null;
    // The "else" may be an explicit else-if, or (for the return form) the next statement.
    PsiIfStatement elseIfStatement = tryCast(getElse(ifStatement), PsiIfStatement.class);
    if (elseIfStatement == null) return null;
    PsiExpression secondCondition = elseIfStatement.getCondition();
    if (secondCondition == null) return null;
    PsiStatement firstStatement = ControlFlowUtils.stripBraces(ifStatement.getThenBranch());
    if (firstStatement == null) return null;
    PsiStatement secondStatement = ControlFlowUtils.stripBraces(elseIfStatement.getThenBranch());
    if (secondStatement == null) return null;
    PsiStatement thirdStatement = getElse(elseIfStatement);
    if (thirdStatement == null) return null;
    // Maps each constant result (-1, 0, 1) to the condition that produces it
    // (null condition = the unconditional final branch).
    Map<Integer, PsiExpression> result = new HashMap<>(3);
    // like if(...) return 1; else if(...) return -1; return 0;
    if (firstStatement instanceof PsiReturnStatement) {
      // All three branches must be returns of constant -1/0/1.
      if (!(secondStatement instanceof PsiReturnStatement) || !(thirdStatement instanceof PsiReturnStatement)) return null;
      PsiExpression firstValue = ((PsiReturnStatement)firstStatement).getReturnValue();
      if (!storeCondition(result, firstCondition, firstValue)) return null;
      if (!storeCondition(result, secondCondition, ((PsiReturnStatement)secondStatement).getReturnValue())) return null;
      if (!storeCondition(result, null, ((PsiReturnStatement)thirdStatement).getReturnValue())) return null;
      return fromMap(result, firstValue, firstStatement);
    }
    // like if(...) x = 1; else if(...) x = -1; else x = 0;
    PsiAssignmentExpression assignment = ExpressionUtils.getAssignment(firstStatement);
    if (assignment == null) return null;
    PsiReferenceExpression ref = tryCast(assignment.getLExpression(), PsiReferenceExpression.class);
    if (ref == null) return null;
    // All three branches must assign a constant to the same variable.
    PsiVariable variable = tryCast(ref.resolve(), PsiVariable.class);
    if (variable == null) return null;
    PsiExpression firstExpression = assignment.getRExpression();
    if (!storeCondition(result, firstCondition, firstExpression)) return null;
    if (!storeCondition(result, secondCondition, ExpressionUtils.getAssignmentTo(secondStatement, variable))) return null;
    if (!storeCondition(result, null, ExpressionUtils.getAssignmentTo(thirdStatement, variable))) return null;
    return fromMap(result, firstExpression, firstStatement);
  }
private static PsiStatement getElse(PsiIfStatement ifStatement) {
PsiStatement branch = ControlFlowUtils.stripBraces(ifStatement.getElseBranch());
if (branch != null) return branch;
PsiStatement thenBranch = ControlFlowUtils.stripBraces(ifStatement.getThenBranch());
if (!(thenBranch instanceof PsiReturnStatement)) return null;
PsiElement next = PsiTreeUtil.skipWhitespacesAndCommentsForward(ifStatement);
return tryCast(next, PsiStatement.class);
}
  /**
   * Unrolls a (possibly nested) ternary chain like {@code c1 ? 1 : c2 ? -1 : 0} into a map from
   * each constant result (-1, 0, 1) to the condition guarding it; the final unconditional value
   * is stored under a null condition. Returns null when any arm is not a constant in [-1, 1].
   */
  @Nullable
  private static Map<Integer, PsiExpression> extractConditions(PsiConditionalExpression ternary) {
    Map<Integer, PsiExpression> result = new HashMap<>(3);
    if (!storeCondition(result, ternary.getCondition(), ternary.getThenExpression())) return null;
    PsiExpression elseExpression = PsiUtil.skipParenthesizedExprDown(ternary.getElseExpression());
    if (elseExpression instanceof PsiConditionalExpression) {
      // Nested ternary: recurse and merge its branches into this map.
      Map<Integer, PsiExpression> m = extractConditions((PsiConditionalExpression)elseExpression);
      if (m == null) return null;
      result.putAll(m);
      return result;
    }
    // Leaf else-arm: record it as the unconditional (null-condition) result.
    return storeCondition(result, null, elseExpression) ? result : null;
  }
@Contract("_, _, null -> false")
private static boolean storeCondition(@NotNull Map<Integer, PsiExpression> result,
@Nullable PsiExpression condition,
@Nullable PsiExpression expression) {
if (expression == null) return false;
Object thenValue = ExpressionUtils.computeConstantExpression(expression);
if (!(thenValue instanceof Integer) || Math.abs((Integer)thenValue) > 1) return false;
result.put((Integer)thenValue, condition);
return true;
}
private static CompareInfo fromTernary(PsiConditionalExpression ternary) {
if (!PsiType.INT.equals(ternary.getType())) return null;
Map<Integer, PsiExpression> map = extractConditions(ternary);
return fromMap(map, ternary, ternary);
}
  /**
   * Validates the (-1, 0, 1) -> condition map produced by {@link #extractConditions} or
   * {@link #fromIf} and, when the conditions form a consistent {@code a < b / a > b / a == b}
   * triple over the same operands, builds the {@link CompareInfo} for the replacement.
   * A null condition in the map means that value was the unconditional branch, which is why
   * each pair may legitimately be null here.
   */
  private static CompareInfo fromMap(@Nullable Map<Integer, PsiExpression> map,
                                     @NotNull PsiExpression expression,
                                     @NotNull PsiElement template) {
    // Need exactly the three outcomes -1, 0 and 1.
    if (map == null || map.size() != 3) {
      return null;
    }
    PsiExpression lt = map.get(-1);
    Pair<PsiExpression, PsiExpression> ltPair = getOperands(lt, JavaTokenType.LT);
    if (lt != null && ltPair == null) return null;
    PsiExpression gt = map.get(1);
    Pair<PsiExpression, PsiExpression> gtPair = getOperands(gt, JavaTokenType.GT);
    // At least one of the two ordered conditions must be an explicit comparison.
    if ((gt != null || ltPair == null) && gtPair == null) return null;
    if (ltPair != null && gtPair != null) {
      // Both present: they must compare the same operands in the same orientation.
      if (!PsiEquivalenceUtil.areElementsEquivalent(ltPair.getFirst(), gtPair.getFirst())) return null;
      if (!PsiEquivalenceUtil.areElementsEquivalent(ltPair.getSecond(), gtPair.getSecond())) return null;
    }
    Pair<PsiExpression, PsiExpression> canonicalPair = ltPair == null ? gtPair : ltPair;
    PsiType leftType = canonicalPair.getFirst().getType();
    PsiType rightType = canonicalPair.getSecond().getType();
    if (!isTypeConvertible(leftType, expression) || !leftType.equals(rightType)) return null;
    PsiExpression eq = map.get(0);
    Pair<PsiExpression, PsiExpression> eqPair = getOperands(eq, JavaTokenType.EQEQ);
    if (eq != null && eqPair == null) return null;
    if (eqPair != null) {
      // The == condition may list the operands in either order.
      if ((!PsiEquivalenceUtil.areElementsEquivalent(canonicalPair.getFirst(), eqPair.getFirst()) ||
           !PsiEquivalenceUtil.areElementsEquivalent(canonicalPair.getSecond(), eqPair.getSecond())) &&
          (!PsiEquivalenceUtil.areElementsEquivalent(canonicalPair.getFirst(), eqPair.getSecond()) ||
           !PsiEquivalenceUtil.areElementsEquivalent(canonicalPair.getSecond(), eqPair.getFirst()))) {
        return null;
      }
    }
    // Resolve the boxed type whose static compare(...) will be suggested.
    PsiClassType boxedType = leftType instanceof PsiPrimitiveType ? ((PsiPrimitiveType)leftType).getBoxedType(expression) :
                             tryCast(leftType, PsiClassType.class);
    if (boxedType == null) return null;
    return new CompareInfo(template, expression, canonicalPair.getFirst(), canonicalPair.getSecond(), boxedType);
  }
/**
 * Extracts the two operands of a binary comparison using the given operator token.
 * A mirrored comparison ("a < b" vs "b > a") is accepted with the operands swapped,
 * so callers always receive the pair in canonical order. Returns null for anything else.
 */
private static Pair<PsiExpression, PsiExpression> getOperands(PsiExpression expression, IElementType expectedToken) {
    PsiExpression stripped = PsiUtil.skipParenthesizedExprDown(expression);
    if (!(stripped instanceof PsiBinaryExpression)) return null;
    PsiBinaryExpression comparison = (PsiBinaryExpression)stripped;
    PsiExpression lhs = PsiUtil.skipParenthesizedExprDown(comparison.getLOperand());
    PsiExpression rhs = PsiUtil.skipParenthesizedExprDown(comparison.getROperand());
    if (lhs == null || rhs == null) return null;
    IElementType actualToken = comparison.getOperationTokenType();
    if (actualToken.equals(expectedToken)) {
        return Pair.create(lhs, rhs);
    }
    // "a < b" is the mirror image of "b > a": accept the flipped operator with swapped operands.
    boolean mirrored = (expectedToken.equals(JavaTokenType.GT) && actualToken.equals(JavaTokenType.LT)) ||
                       (expectedToken.equals(JavaTokenType.LT) && actualToken.equals(JavaTokenType.GT));
    return mirrored ? Pair.create(rhs, lhs) : null;
}
/**
 * Recognizes boxed {@code a.compareTo(b)} calls where both the qualifier and the
 * argument can be reduced to primitive expressions (e.g. via valueOf, a cast, or a
 * boxing constructor). Returns null when the call does not match that shape.
 */
@Contract("null -> null")
private static CompareInfo fromCall(PsiMethodCallExpression call) {
    if (call == null) return null;
    PsiElement nameElement = call.getMethodExpression().getReferenceNameElement();
    if (nameElement == null) return null;
    String name = nameElement.getText();
    if (!"compareTo".equals(name)) return null;
    PsiExpression[] args = call.getArgumentList().getExpressions();
    // compareTo takes exactly one argument.
    if (args.length != 1) return null;
    PsiExpression arg = args[0];
    PsiExpression qualifier = call.getMethodExpression().getQualifierExpression();
    if (qualifier == null) return null;
    // The boxed type is derived from the class declaring the resolved compareTo.
    PsiClassType boxedType = getBoxedType(call);
    if (boxedType == null) return null;
    PsiPrimitiveType primitiveType = PsiPrimitiveType.getUnboxedType(boxedType);
    if (!isTypeConvertible(primitiveType, call)) return null;
    // Unwrap the boxing around each side down to the raw primitive operand.
    PsiExpression left = extractPrimitive(boxedType, primitiveType, qualifier);
    if (left == null) return null;
    PsiExpression right = extractPrimitive(boxedType, primitiveType, arg);
    if (right == null) return null;
    return new CompareInfo(call, call, left, right, boxedType);
}
/**
 * Returns the class type of the class declaring the called method, or null when the
 * call cannot be resolved.
 */
@Nullable
static PsiClassType getBoxedType(PsiMethodCallExpression call) {
    PsiMethod resolved = call.resolveMethod();
    PsiClass owner = resolved == null ? null : resolved.getContainingClass();
    if (owner == null) return null;
    return JavaPsiFacade.getElementFactory(call.getProject()).createType(owner);
}
/**
 * Unwraps a boxing wrapper around a primitive expression. Handles three forms:
 * {@code Type.valueOf(x)}, {@code (Type)x}, and {@code new Type(x)}, as well as an
 * already-primitive expression. Returns the inner primitive expression, or null when
 * the expression is not a recognized boxing of the expected type.
 *
 * @param type          the expected boxed class type
 * @param primitiveType the expected unboxed primitive type
 */
@Nullable
static PsiExpression extractPrimitive(PsiClassType type, PsiPrimitiveType primitiveType, PsiExpression expression) {
    expression = PsiUtil.skipParenthesizedExprDown(expression);
    if (expression == null) return null;
    // Already a primitive of the right type: nothing to unwrap.
    if (primitiveType.equals(expression.getType())) {
        return expression;
    }
    if (expression instanceof PsiMethodCallExpression) {
        // Type.valueOf(primitive) — must resolve to the expected boxed class.
        PsiMethodCallExpression call = (PsiMethodCallExpression)expression;
        if (!"valueOf".equals(call.getMethodExpression().getReferenceName())) return null;
        PsiExpression[] args = call.getArgumentList().getExpressions();
        if (args.length != 1) return null;
        PsiMethod method = call.resolveMethod();
        if (method == null || type.resolve() != method.getContainingClass()) return null;
        return checkPrimitive(args[0]);
    }
    if (expression instanceof PsiTypeCastExpression) {
        // (Type) primitive — a boxing cast to the expected type.
        PsiTypeCastExpression cast = (PsiTypeCastExpression)expression;
        if (!type.equals(cast.getType())) return null;
        return checkPrimitive(cast.getOperand());
    }
    if (expression instanceof PsiNewExpression) {
        // new Type(primitive) — a boxing constructor with a single primitive argument.
        PsiNewExpression newExpression = (PsiNewExpression)expression;
        if (!type.equals(newExpression.getType())) return null;
        PsiExpressionList argumentList = newExpression.getArgumentList();
        if (argumentList == null) return null;
        PsiExpression[] args = argumentList.getExpressions();
        if (args.length != 1) return null;
        if (!(args[0].getType() instanceof PsiPrimitiveType)) return null;
        return checkPrimitive(args[0]);
    }
    return null;
}
/** Returns the expression when it is non-null and has a primitive type, otherwise null. */
private static PsiExpression checkPrimitive(PsiExpression expression) {
    if (expression == null) return null;
    return expression.getType() instanceof PsiPrimitiveType ? expression : null;
}
/**
 * Whether the comparison can be rewritten through the boxed type's static compare().
 * double/float are always accepted; other primitives only at language level 7+
 * (presumably because Integer.compare and friends were introduced in Java 7 — TODO confirm).
 */
@Contract("null, _ -> false")
private static boolean isTypeConvertible(PsiType type, PsiElement context) {
    type = PsiPrimitiveType.getOptionallyUnboxedType(type);
    return type != null && (PsiType.DOUBLE.equals(type) ||
                            PsiType.FLOAT.equals(type) ||
                            PsiUtil.isLanguageLevel7OrHigher(context));
}
/**
 * Captures a recognized three-way comparison: the element to replace, the expression
 * holding the comparison result, the two operands, and the boxed class whose
 * compare()/compareTo() will be used in the replacement.
 */
static class CompareInfo {
    // Element whose text is restored when the replacement target differs from the template.
    final @NotNull PsiElement myTemplate;
    // Expression replaced by the generated compare call.
    final @NotNull PsiExpression myToReplace;
    final @NotNull PsiExpression myLeft;
    final @NotNull PsiExpression myRight;
    // Boxed class providing the static compare() (or whose compareTo() is used).
    final @NotNull PsiClassType myClass;
    CompareInfo(@NotNull PsiElement template,
                @NotNull PsiExpression toReplace,
                @NotNull PsiExpression left,
                @NotNull PsiExpression right,
                @NotNull PsiClassType aClass) {
        myTemplate = template;
        myToReplace = toReplace;
        myLeft = left;
        myRight = right;
        myClass = aClass;
    }
    /**
     * Replaces {@code toReplace} with the compare call. Uses instance compareTo() when the
     * left operand already has a class (boxed) type, otherwise the static Type.compare().
     */
    private @NotNull PsiElement replace(PsiElement toReplace, CommentTracker ct) {
        String replacement;
        if (this.myLeft.getType() instanceof PsiClassType) {
            replacement = ct.text(this.myLeft, ParenthesesUtils.METHOD_CALL_PRECEDENCE) + ".compareTo(" + ct.text(this.myRight) + ")";
        } else {
            replacement = this.myClass.getCanonicalText() + ".compare(" + ct.text(this.myLeft) + "," + ct.text(this.myRight) + ")";
        }
        if (toReplace == myTemplate) {
            return ct.replaceAndRestoreComments(myToReplace, replacement);
        } else {
            // Replacement target differs from the template: substitute the inner expression
            // first, then replace the outer element with the (now updated) template.
            ct.replace(myToReplace, replacement);
            return ct.replaceAndRestoreComments(toReplace, myTemplate);
        }
    }
    /** Short, human-readable form of the replacement used in the quick-fix name. */
    public String getReplacementText() {
        String methodName = this.myLeft.getType() instanceof PsiClassType ? "compareTo" : "compare";
        return myClass.getName()+"."+methodName+"()";
    }
}
/**
 * Quick fix that replaces a hand-rolled three-way comparison (ternary, if-chain, or a
 * boxed compareTo call) with the corresponding {@code Type.compare()} / {@code compareTo()} call.
 */
private static class ReplaceWithPrimitiveCompareFix implements LocalQuickFix {
    // Display text computed at registration time, e.g. "Integer.compare()".
    private final String myReplacementText;
    ReplaceWithPrimitiveCompareFix(String replacementText) {
        myReplacementText = replacementText;
    }
    @Nls
    @NotNull
    @Override
    public String getName() {
        return CommonQuickFixBundle.message("fix.replace.with.x", myReplacementText);
    }
    @Nls
    @NotNull
    @Override
    public String getFamilyName() {
        return JavaAnalysisBundle.message("inspection.use.compare.method.fix.family.name");
    }
    @Override
    public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
        PsiElement element = descriptor.getStartElement();
        PsiElement toReplace;
        // Trailing else/else-if branches that become dead once an if-chain is collapsed.
        List<PsiElement> toDelete = new ArrayList<>();
        CompareInfo info;
        if (element instanceof PsiConditionalExpression) {
            // Ternary form: cond ? a : (cond2 ? b : c)
            toReplace = element;
            info = fromTernary((PsiConditionalExpression)element);
        }
        else {
            PsiElement parent = element.getParent();
            if (parent instanceof PsiIfStatement) {
                // if/else-if chain form; the redundant branches are removed afterwards.
                toReplace = parent;
                info = fromIf((PsiIfStatement)parent);
                PsiStatement elseIf = getElse((PsiIfStatement)parent);
                toDelete.add(elseIf);
                if (elseIf instanceof PsiIfStatement) {
                    toDelete.add(getElse((PsiIfStatement)elseIf));
                }
            } else {
                // Boxed compareTo() call form.
                PsiMethodCallExpression call = PsiTreeUtil.getParentOfType(element, PsiMethodCallExpression.class);
                info = fromCall(call);
                toReplace = call;
            }
        }
        if (info == null) return;
        CommentTracker ct = new CommentTracker();
        PsiElement result = info.replace(toReplace, ct);
        // Delete branches that are still valid after the replacement, keeping their comments.
        StreamEx.of(toDelete).nonNull().filter(PsiElement::isValid).forEach(e -> new CommentTracker().deleteAndRestoreComments(e));
        SimplifiableIfStatementInspection.tryJoinDeclaration(result);
    }
}
}
| |
package com.bazaarvoice.emodb.sor.condition.impl;
import com.bazaarvoice.emodb.sor.condition.Condition;
import com.bazaarvoice.emodb.sor.condition.ConditionVisitor;
import com.bazaarvoice.emodb.sor.condition.Conditions;
import com.bazaarvoice.emodb.sor.condition.LikeCondition;
import com.bazaarvoice.emodb.sor.delta.deser.DeltaJson;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.io.CharStreams;
import javax.annotation.Nullable;
import java.io.IOException;
import java.io.Writer;
import java.util.List;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
abstract public class LikeConditionImpl extends AbstractCondition implements LikeCondition {
private final String _condition;
/** Creates a like-condition from an arbitrary value, which must be a String. */
public static LikeConditionImpl create(Object value) {
    checkArgument(value instanceof String, "Like expression only supports strings");
    return create((String) value);
}
/**
 * Parses a like-expression into the most specific matcher available. '*' matches any
 * (possibly empty) run of characters; '\' escapes the following character. Shapes, in
 * order of preference: exact match (no wildcards), any string ("*"), suffix ("*x"),
 * prefix ("x*"), surrounds ("x*y"), contains ("*x*"), and the general multi-wildcard
 * {@link Complex} form.
 *
 * @throws IllegalArgumentException if the expression ends with a dangling escape character
 */
public static LikeConditionImpl create(final String condition) {
    checkNotNull(condition, "Like expression cannot be null");
    // Optimize for the most common case where an expression contains a single wildcard.
    int firstWildcard = -1;
    List<Integer> remainingWildcards = null;
    // "unescaped" is rewritten in place as escapes and redundant wildcards are removed;
    // wildcard indexes recorded below refer to positions in this rewritten string.
    String unescaped = condition;
    int length = unescaped.length();
    int i = 0;
    while (i < length) {
        switch (unescaped.charAt(i)) {
            case '\\':
                if (i == length-1) {
                    throw new IllegalArgumentException("Invalid terminal escape character at position " + i);
                }
                // Remove the escape character and preserve the following character.
                // For example, "abc\\*def" becomes "abc*def" and evaluation of the string
                // continues at the first character after the '*' ('d').
                unescaped = unescaped.substring(0, i) + unescaped.substring(i+1);
                length -= 1;
                break;
            case '*':
                // Record the index of the wildcard
                if (firstWildcard == -1) {
                    firstWildcard = i;
                } else {
                    if (remainingWildcards == null) {
                        remainingWildcards = Lists.newArrayListWithCapacity(3);
                    }
                    remainingWildcards.add(i);
                }
                // Consecutive wildcards are redundant. If there are any remove them now.
                int endConsecWilds = i+1;
                while (endConsecWilds != length && unescaped.charAt(endConsecWilds) == '*') {
                    endConsecWilds += 1;
                }
                if (endConsecWilds != i+1) {
                    unescaped = unescaped.substring(0, i+1) + unescaped.substring(endConsecWilds);
                    length -= endConsecWilds - i - 1;
                }
                break;
        }
        i += 1;
    }
    if (firstWildcard == -1) {
        // There were no wildcards. Ideally the caller should use a simple equality condition. We'll
        // optimize by returning a predicate which performs a simple equality check.
        return new ExactMatch(condition, unescaped);
    }
    if (length == 1) {
        // The entire string was nothing but wildcards. Ideally the caller should use "is(string)" instead.
        return AnyString.getInstance(condition);
    }
    if (remainingWildcards == null) {
        // Simple case where there is exactly one wildcard in the expression
        if (firstWildcard == 0) {
            // Suffix case, such as "*:testcustomer"
            return new EndsWith(condition, unescaped.substring(1));
        } else if (firstWildcard == length-1) {
            // Prefix case, such as "review:*"
            return new StartsWith(condition, unescaped.substring(0, firstWildcard));
        } else {
            // Surrounds case, such as "source:*:testcustomer"
            return new Surrounds(condition, unescaped.substring(0, firstWildcard), unescaped.substring(firstWildcard+1));
        }
    }
    // Multiple wildcards. The final optimization is the contains case, such as "*review*"
    if (firstWildcard == 0 && remainingWildcards.size() == 1 && remainingWildcards.get(0) == length-1) {
        return new Contains(condition, unescaped.substring(1, length-1));
    }
    // Break the string up into constant substrings separated by wildcards. Notice that if an expressions
    // starts with a wildcard then the first substring will be the empty string, "". This is intentional since
    // the empty string will match the beginning of all input strings. The same logic applies if the
    // expression ends with a wildcard.
    List<String> substrings = Lists.newArrayListWithCapacity(remainingWildcards.size() + 2);
    substrings.add(unescaped.substring(0, firstWildcard));
    for (int nextWildcard : remainingWildcards) {
        substrings.add(unescaped.substring(firstWildcard+1, nextWildcard));
        firstWildcard = nextWildcard;
    }
    substrings.add(unescaped.substring(firstWildcard+1));
    return new Complex(condition, substrings);
}
// Stores the original (still-escaped) expression text so it can be re-serialized verbatim.
protected LikeConditionImpl(String condition) {
    _condition = condition;
}
// Standard visitor dispatch for the condition hierarchy.
@Override
public <T, V> V visit(ConditionVisitor<T, V> visitor, @Nullable T context) {
    return visitor.visit(this, context);
}
// Serializes as like("<original expression>").
@Override
public void appendTo(Appendable buf) throws IOException {
    // Use a writer so the condition string can be correctly converted to json using DeltaJson.
    Writer out = CharStreams.asWriter(buf);
    out.write("like(");
    DeltaJson.write(out, _condition);
    out.write(")");
}
/** Returns the original like-expression exactly as supplied by the caller. */
@Override
public String getCondition() {
    return _condition;
}
/**
 * Whether some string could satisfy both this condition and the given one.
 */
@Override
public boolean overlaps(LikeCondition condition) {
    // If either condition is a constant then the other condition must match the condition's string to overlap.
    // For example, "door" overlaps "d*r"
    if (!hasWildcards()) {
        return condition.matches(getCondition());
    } else if (!condition.hasWildcards()) {
        return matches(condition.getCondition());
    }
    // Any internal wildcards surrounded by constants can match any other internal values, so determining overlap
    // only depends on the prefixes and suffixes.
    String prefix = getPrefix();
    String otherPrefix = condition.getPrefix();
    String suffix = getSuffix();
    String otherSuffix = condition.getSuffix();
    // A null prefix/suffix means "unconstrained" and therefore never rules out overlap.
    return (prefix == null || otherPrefix == null || prefix.startsWith(otherPrefix) || otherPrefix.startsWith(prefix)) &&
           (suffix == null || otherSuffix == null || suffix.endsWith(otherSuffix) || otherSuffix.endsWith(suffix));
}
@Override
public boolean isSubsetOf(LikeCondition condition) {
    // This condition is a subset of the other condition if this condition, with all wildcards replaced with
    // unique characters, matches the other condition.
    // NUL is used as the marker because it cannot appear as a constant in a parsed expression.
    String testString = substituteWildcardsWith("\u0000");
    return condition.matches(testString);
}
/**
 * Constant prefix before the first wildcard, or null when unconstrained.
 * Default implementation returns null, subclasses with a prefix must override.
 */
@Override
public String getPrefix() {
    return null;
}
/**
 * Constant suffix after the last wildcard, or null when unconstrained.
 * Default implementation returns null, subclasses with a suffix must override.
 */
@Override
public String getSuffix() {
    return null;
}
/**
 * Default implementation returns true, the one subclass where this is false, {@link ExactMatch}, overrides.
 */
@Override
public boolean hasWildcards() {
    return true;
}
/**
 * Returns this condition with all wildcards substituted with the provided string.
 * Used by {@link #isSubsetOf} to synthesize a probe string.
 */
abstract protected String substituteWildcardsWith(String substitute);
/**
 * Two conditions are equal when their original expression strings are equal.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    // Must test against LikeConditionImpl, not the LikeCondition interface: the
    // unconditional cast below would throw ClassCastException for any other
    // LikeCondition implementation.
    if (!(o instanceof LikeConditionImpl)) {
        return false;
    }
    LikeConditionImpl that = (LikeConditionImpl) o;
    return _condition.equals(that._condition);
}
// Hash is derived from the same field equals() compares, keeping the contract intact.
@Override
public int hashCode() {
    return _condition.hashCode();
}
/**
 * Returns a simpler equivalent representation of this same condition if one exists. For example,
 * <code>like("constant_string")</code> can be reduced to the equality condition "constant_string".
 * By default the base class returns itself; subclasses can override as appropriate.
 */
public Condition simplify() {
    return this;
}
/** Implementation for exactly matching a string, such as "review:client" */
public static class ExactMatch extends LikeConditionImpl {
    // Fully-unescaped constant; may differ from the raw condition if it contained escapes.
    private final String _expression;
    private ExactMatch(String condition, String expression) {
        super(condition);
        _expression = expression;
    }
    @Override
    public boolean matches(String input) {
        return _expression.equals(input);
    }
    @Override
    public Condition simplify() {
        // A wildcard-free like() is just an equality check.
        return Conditions.equal(_expression);
    }
    @Override
    public boolean hasWildcards() {
        return false;
    }
    @Override
    protected String substituteWildcardsWith(String substitute) {
        // No wildcards to substitute.
        return _expression;
    }
}
/** Implementation for matching all strings, such as "*" */
public static class AnyString extends LikeConditionImpl {
    // Shared instance for the common "*" condition. Declared final so the singleton
    // cannot be reassigned; kept public for backward compatibility with existing readers.
    public static final AnyString _defaultInstance = new AnyString("*");
    private static AnyString getInstance(String condition) {
        // Most frequently the condition that spawned this instance is a simple single wildcard character,
        // "*". If this is the case then reuse the default singleton. Otherwise create a new instance
        // to preserve the original condition.
        if ("*".equals(condition)) {
            return _defaultInstance;
        }
        return new AnyString(condition);
    }
    private AnyString(String condition) {
        super(condition);
    }
    /** Every input matches. */
    @Override
    public boolean matches(String input) {
        return true;
    }
    @Override
    public Condition simplify() {
        // Matching everything is equivalent to "is a string".
        return Conditions.isString();
    }
    @Override
    protected String substituteWildcardsWith(String substitute) {
        return substitute;
    }
}
/** Implementation for matching a prefix, such as "review:*" */
public static class StartsWith extends LikeConditionImpl {
    private final String _prefix;
    private StartsWith(String condition, String prefix) {
        super(condition);
        _prefix = prefix;
    }
    @Override
    public boolean matches(String input) {
        return input.startsWith(_prefix);
    }
    @Override
    public String getPrefix() {
        return _prefix;
    }
    @Override
    protected String substituteWildcardsWith(String substitute) {
        // Single trailing wildcard.
        return _prefix + substitute;
    }
}
/** Implementation for matching a suffix, such as "*:client" */
public static class EndsWith extends LikeConditionImpl {
    private final String _suffix;
    private EndsWith(String condition, String suffix) {
        super(condition);
        _suffix = suffix;
    }
    @Override
    public boolean matches(String input) {
        return input.endsWith(_suffix);
    }
    @Override
    public String getSuffix() {
        return _suffix;
    }
    @Override
    protected String substituteWildcardsWith(String substitute) {
        // Single leading wildcard.
        return substitute + _suffix;
    }
}
/** Implementation for matching surrounded wildcard, such as "group:*:client" */
public static class Surrounds extends LikeConditionImpl {
    private final String _prefix;
    private final String _suffix;
    // Cached minimum input length: prevents the prefix and suffix from overlapping
    // the same characters of a short input.
    private final int _minLength;
    private Surrounds(String condition, String prefix, String suffix) {
        super(condition);
        _prefix = prefix;
        _suffix = suffix;
        _minLength = _prefix.length() + _suffix.length();
    }
    @Override
    public boolean matches(String input) {
        return input.length() >= _minLength &&
               input.startsWith(_prefix) &&
               input.endsWith(_suffix);
    }
    @Override
    public String getPrefix() {
        return _prefix;
    }
    @Override
    public String getSuffix() {
        return _suffix;
    }
    @Override
    protected String substituteWildcardsWith(String substitute) {
        return _prefix + substitute + _suffix;
    }
}
/** Implementation for matching a contained expression, such as "*client*" */
public static class Contains extends LikeConditionImpl {
    private final String _expression;
    private Contains(String condition, String expression) {
        super(condition);
        _expression = expression;
    }
    @Override
    public boolean matches(String input) {
        return input.contains(_expression);
    }
    @Override
    protected String substituteWildcardsWith(String substitute) {
        // Wildcards on both sides of the constant.
        return substitute + _expression + substitute;
    }
}
/**
 * Implementation for matching complex expressions with multiple wildcards that doesn't match
 * any of the previous more efficient computations.
 */
public static class Complex extends LikeConditionImpl {
    // First constant (may be "" when the expression starts with a wildcard).
    private final String _prefix;
    // Last constant (may be "" when the expression ends with a wildcard).
    private final String _suffix;
    // Constants between the first and last, in order.
    private final List<String> _innerSubstrings;
    // Sum of all constant lengths; a shorter input can never match.
    private final int _minLength;
    private Complex(String condition, List<String> substrings) {
        super(condition);
        int length = substrings.size();
        _prefix = substrings.get(0);
        _suffix = substrings.get(length-1);
        _innerSubstrings = ImmutableList.copyOf(substrings.subList(1, length-1));
        int minLength = 0;
        for (String substring : substrings) {
            minLength += substring.length();
        }
        _minLength = minLength;
    }
    @Override
    public boolean matches(String input) {
        // Fastest initial checks are whether the total string is at least as long as all substrings
        // followed by a prefix and suffix check
        if (input.length() < _minLength || !input.startsWith(_prefix) || !input.endsWith(_suffix)) {
            return false;
        }
        // Ensure each inner string appears in-order non-overlapping within the input string starting
        // after the prefix.
        int idx = _prefix.length();
        for (String substring : _innerSubstrings) {
            if ((idx = input.indexOf(substring, idx)) == -1) {
                return false;
            }
            idx += substring.length();
        }
        // Ensure the final inner string terminated before the suffix
        return idx <= input.length() - _suffix.length();
    }
    @Override
    public String getPrefix() {
        // An empty prefix means "unconstrained"; report it as null per the interface contract.
        return _prefix.length() != 0 ? _prefix : null;
    }
    @Override
    public String getSuffix() {
        return _suffix.length() != 0 ? _suffix : null;
    }
    @Override
    protected String substituteWildcardsWith(String substitute) {
        return _prefix + substitute +
               Joiner.on(substitute).join(_innerSubstrings) +
               substitute + _suffix;
    }
}
}
| |
/*
* Copyright (c) 2005-2008, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.provider.openid.extensions;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.openid4java.message.AuthRequest;
import org.openid4java.message.MessageException;
import org.openid4java.message.MessageExtension;
import org.openid4java.message.ax.AxMessage;
import org.openid4java.message.sreg.SRegMessage;
import org.openid4java.message.sreg.SRegRequest;
import org.openid4java.message.sreg.SRegResponse;
import org.wso2.carbon.identity.base.IdentityException;
import org.wso2.carbon.identity.provider.dto.OpenIDAuthRequestDTO;
import org.wso2.carbon.identity.provider.dto.OpenIDClaimDTO;
import org.wso2.carbon.identity.provider.openid.handlers.OpenIDAuthenticationRequest;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
/**
* Functionality related OpenID Simple Registration. OpenID Simple Registration is an extension to
* the OpenID Authentication protocol that allows for very light-weight profile exchange. It is
* designed to pass eight commonly requested pieces of information when an End User goes to register
* a new account with a web service.
*/
public class OpenIDSimpleReg extends OpenIDExtension {
private static final Log log = LogFactory.getLog(OpenIDSimpleReg.class);
private OpenIDAuthenticationRequest request;
/**
 * @param request An instance of OpenIDAuthenticationRequest; must not be null
 * @throws IdentityException when the request is null
 */
public OpenIDSimpleReg(OpenIDAuthenticationRequest request) throws IdentityException {
    if (request == null) {
        log.debug("Request cannot be null while initializing OpenIDSimpleReg");
        throw IdentityException.error("Request cannot be null while initializing OpenIDSimpleReg");
    }
    this.request = request;
}
/**
 * Collects the claim URIs requested by the relying party (both required and optional
 * SReg attributes) and appends any that are not already present to
 * {@code requiredAttributes}.
 *
 * @throws IdentityException when reading the extension from the request fails
 */
@Override
public void addRequiredAttributes(List<String> requiredAttributes) throws IdentityException {
    try {
        AuthRequest authRequest = request.getAuthRequest();
        if (authRequest == null) {
            return;
        }
        // Prefer SReg 1.0, then SReg 1.1, then fall back to Attribute Exchange.
        MessageExtension extension = null;
        if (authRequest.hasExtension(SRegMessage.OPENID_NS_SREG)) {
            extension = authRequest.getExtension(SRegMessage.OPENID_NS_SREG);
        } else if (authRequest.hasExtension(SRegMessage.OPENID_NS_SREG11)) {
            extension = authRequest.getExtension(SRegMessage.OPENID_NS_SREG11);
        } else if (authRequest.hasExtension(AxMessage.OPENID_NS_AX)) {
            extension = authRequest.getExtension(AxMessage.OPENID_NS_AX);
        }
        if (!(extension instanceof SRegRequest)) {
            return;
        }
        SRegRequest sregReq = (SRegRequest) extension;
        // openid4java exposes raw lists; the attribute names are claim URI strings.
        @SuppressWarnings("unchecked")
        List<String> required = sregReq.getAttributes(true);
        @SuppressWarnings("unchecked")
        List<String> optional = sregReq.getAttributes();
        // Treat optional attributes the same as required ones, without duplicates.
        if (optional != null) {
            for (String attr : optional) {
                if (!required.contains(attr)) {
                    required.add(attr);
                }
            }
        }
        for (String claimUri : required) {
            if (!requiredAttributes.contains(claimUri)) {
                requiredAttributes.add(claimUri);
            }
        }
    } catch (MessageException e) {
        log.error("Failed to add required attributes of OpenID Simple Registration", e);
        throw IdentityException.error("Failed to add required attributes of OpenID Simple Registration", e);
    }
}
/**
 * Builds the SReg response extension for the authenticated user: resolves the claim
 * URIs requested by the RP, loads the user's claim values and copies them into a
 * {@link SRegResponse}. Returns null when the request carries no SReg extension.
 *
 * @throws IdentityException when the extension cannot be read or built
 */
@Override
public MessageExtension getMessageExtension(String userId, String profileName, OpenIDAuthRequestDTO requestDTO)
        throws IdentityException {
    try {
        AuthRequest authRequest = request.getAuthRequest();
        // Use SRegMessage consistently for the namespace constants (SRegRequest only
        // inherits them from SRegMessage).
        MessageExtension extension = null;
        if (authRequest.hasExtension(SRegMessage.OPENID_NS_SREG)) {
            extension = authRequest.getExtension(SRegMessage.OPENID_NS_SREG);
        } else if (authRequest.hasExtension(SRegMessage.OPENID_NS_SREG11)) {
            extension = authRequest.getExtension(SRegMessage.OPENID_NS_SREG11);
        } else if (authRequest.hasExtension(AxMessage.OPENID_NS_AX)) {
            extension = authRequest.getExtension(AxMessage.OPENID_NS_AX);
        }
        if (log.isDebugEnabled()) {
            // Log at debug level to match the isDebugEnabled() guard; the previous
            // log.info() calls defeated the guard's purpose.
            if (extension == null) {
                log.debug("SReg extension is null");
            } else {
                log.debug("SReg extension: " + extension.getTypeUri());
            }
        }
        SRegResponse response = null;
        if (extension instanceof SRegRequest) {
            SRegRequest sregReq = (SRegRequest) extension;
            // Get the required attributes as requested by the RP; optional attributes
            // are merged in and treated the same, without duplicates.
            @SuppressWarnings("unchecked")
            List<String> required = sregReq.getAttributes(true);
            @SuppressWarnings("unchecked")
            List<String> optional = sregReq.getAttributes();
            if (optional != null) {
                for (String attr : optional) {
                    if (!required.contains(attr)) {
                        required.add(attr);
                    }
                }
            }
            if (log.isDebugEnabled()) {
                log.debug("Required attributes for SReg request: " + required);
            }
            Map userDataSReg = new HashMap();
            response = SRegResponse.createSRegResponse(sregReq, userDataSReg);
            Map<String, OpenIDClaimDTO> claimValues =
                    populateAttributeValues(required, userId, profileName, requestDTO);
            setSimpleAttributeRegistrationValues(response, claimValues);
        }
        return response;
    } catch (MessageException e) {
        log.error("Failed to create message extension for OpenID Simple Registration", e);
        throw IdentityException.error("Failed to create message extension for OpenID Simple Registration", e);
    }
}
/**
 * Populate the response with claim values. If we can't find the required values with us, we
 * simply avoid sending them. An Identity Provider MAY return any subset of the following fields
 * in response to the query.
 *
 * @param response Simple Registration response.
 * @param claimValues Claim values.
 * @throws MessageException
 */
protected void setSimpleAttributeRegistrationValues(SRegResponse response,
                                                    Map<String, OpenIDClaimDTO> claimValues)
        throws MessageException {
    // Copy each resolved claim into the SReg response.
    for (Entry<String, OpenIDClaimDTO> entry : claimValues.entrySet()) {
        OpenIDClaimDTO claim = entry.getValue();
        response.addAttribute(claim.getClaimUri(), claim.getClaimValue());
    }
}
}
| |
package org.opentech.db;
import android.app.SearchManager;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteStatement;
import android.net.Uri;
import android.provider.BaseColumns;
import android.support.v4.content.LocalBroadcastManager;
import android.text.TextUtils;
import org.opentech.model.Day;
import org.opentech.model.FossasiaEvent;
import org.opentech.model.Person;
import org.opentech.model.Speaker;
import org.opentech.model.Sponsor;
import org.opentech.model.Venue;
import org.opentech.utils.StringUtils;
import java.util.ArrayList;
/**
* Here comes the badass SQL.
*
* @author Christophe Beyls
*/
public class DatabaseManager {
public static final String ACTION_SCHEDULE_REFRESHED = "be.digitalia.fosdem.action.SCHEDULE_REFRESHED";
public static final String ACTION_ADD_BOOKMARK = "be.digitalia.fosdem.action.ADD_BOOKMARK";
public static final String EXTRA_EVENT_ID = "event_id";
public static final String EXTRA_EVENT_START_TIME = "event_start";
public static final String ACTION_REMOVE_BOOKMARKS = "be.digitalia.fosdem.action.REMOVE_BOOKMARKS";
public static final String EXTRA_EVENT_IDS = "event_ids";
public static final int PERSON_NAME_COLUMN_INDEX = 1;
private static final Uri URI_TRACKS = Uri.parse("sqlite://be.digitalia.fosdem/tracks");
private static final Uri URI_EVENTS = Uri.parse("sqlite://be.digitalia.fosdem/events");
private static final String DB_PREFS_FILE = "database";
private static final String LAST_UPDATE_TIME_PREF = "last_update_time";
private static final String[] COUNT_PROJECTION = new String[]{"count(*)"};
// Ignore conflicts in case of existing person
private static DatabaseManager instance;
private Context context;
private DatabaseHelper helper;
// Private: instances are created only through init() (singleton).
private DatabaseManager(Context context) {
    this.context = context;
    helper = new DatabaseHelper(context);
}
/**
 * Initializes the singleton; subsequent calls are no-ops.
 * NOTE(review): not synchronized — assumes the first call happens on a single thread
 * (typically Application.onCreate); confirm callers.
 */
public static void init(Context context) {
    if (instance == null) {
        instance = new DatabaseManager(context);
    }
}
/** Returns the singleton; null until {@link #init} has been called. */
public static DatabaseManager getInstance() {
    return instance;
}
/**
 * Counts the rows of {@code table} matching the selection. The cursor is always
 * closed, even when reading it throws.
 */
private static long queryNumEntries(SQLiteDatabase db, String table, String selection, String[] selectionArgs) {
    Cursor countCursor = db.query(table, COUNT_PROJECTION, selection, selectionArgs, null, null, null);
    try {
        countCursor.moveToFirst();
        return countCursor.getLong(0);
    } finally {
        countCursor.close();
    }
}
/** Binds a possibly-null string to the statement: null values become SQL NULL. */
private static void bindString(SQLiteStatement statement, int index, String value) {
    if (value != null) {
        statement.bindString(index, value);
    } else {
        statement.bindNull(index);
    }
}
// Empties every table; callers are expected to wrap this in a transaction
// (see the public clearDatabase()).
private static void clearDatabase(SQLiteDatabase db) {
    db.delete(DatabaseHelper.EVENTS_TABLE_NAME, null, null);
    db.delete(DatabaseHelper.EVENTS_TITLES_TABLE_NAME, null, null);
    db.delete(DatabaseHelper.PERSONS_TABLE_NAME, null, null);
    db.delete(DatabaseHelper.EVENTS_PERSONS_TABLE_NAME, null, null);
    db.delete(DatabaseHelper.LINKS_TABLE_NAME, null, null);
    db.delete(DatabaseHelper.TRACKS_TABLE_NAME, null, null);
    db.delete(DatabaseHelper.DAYS_TABLE_NAME, null, null);
    // Deleting Fossasia tables
    db.delete(DatabaseHelper.TABLE_NAME_KEY_SPEAKERS, null, null);
    db.delete(DatabaseHelper.TABLE_NAME_SCHEDULE, null, null);
    db.delete(DatabaseHelper.TABLE_NAME_SPEAKER_EVENT_RELATION, null, null);
    db.delete(DatabaseHelper.TABLE_NAME_TRACK, null, null);
    db.delete(DatabaseHelper.TABLE_NAME_SPONSOR, null, null);
}
/**
 * Loads the full event objects for every bookmarked event id.
 * The cursor is closed even if loading an event throws, and events that can no
 * longer be resolved (stale bookmarks) are skipped instead of adding null.
 */
public ArrayList<FossasiaEvent> getBookmarkEvents() {
    ArrayList<FossasiaEvent> bookmarkedEvents = new ArrayList<>();
    Cursor cursor = helper.getReadableDatabase().rawQuery("SELECT * FROM " + DatabaseHelper.BOOKMARKS_TABLE_NAME, null);
    try {
        if (cursor.moveToFirst()) {
            do {
                FossasiaEvent event = getEventById(cursor.getInt(0));
                // getEventById returns null when the bookmarked event no longer exists.
                if (event != null) {
                    bookmarkedEvents.add(event);
                }
            } while (cursor.moveToNext());
        }
    } finally {
        cursor.close();
    }
    return bookmarkedEvents;
}
/** Reads the event id from column 0 of an event cursor row. */
public static long toEventId(Cursor cursor) {
    return cursor.getLong(0);
}
/** Reads the start time (millis) from column 1, or -1 when the column is NULL. */
public static long toEventStartTimeMillis(Cursor cursor) {
    return cursor.isNull(1) ? -1L : cursor.getLong(1);
}
/**
 * Populates (or allocates) a Person from the current cursor row: id at column 0,
 * name at column {@code PERSON_NAME_COLUMN_INDEX}.
 *
 * @param person existing instance to reuse, or null to allocate a new one
 */
public static Person toPerson(Cursor cursor, Person person) {
    if (person == null) {
        person = new Person();
    }
    person.setId(cursor.getLong(0));
    // Use the declared column-index constant instead of a magic number.
    person.setName(cursor.getString(PERSON_NAME_COLUMN_INDEX));
    return person;
}
/** Convenience overload that always allocates a fresh Person. */
public static Person toPerson(Cursor cursor) {
    return toPerson(cursor, null);
}
// Private preference file holding database bookkeeping (e.g. last update time).
private SharedPreferences getSharedPreferences() {
    return context.getSharedPreferences(DB_PREFS_FILE, Context.MODE_PRIVATE);
}
/**
 * Executes the given SQL statements atomically: either all succeed or none are applied.
 */
public void performInsertQueries(ArrayList<String> queries) {
    SQLiteDatabase db = helper.getWritableDatabase();
    db.beginTransaction();
    try {
        for (String query : queries) {
            db.execSQL(query);
        }
        db.setTransactionSuccessful();
    } finally {
        // Without this finally, a failing statement would leave the transaction open
        // and the database locked for all subsequent writers.
        db.endTransaction();
    }
}
/**
 * Wipes all schedule data inside a transaction, clears the last-update marker and
 * notifies observers/receivers that the schedule changed. Notifications are sent from
 * the finally block, so they fire even when the clear fails.
 */
public void clearDatabase() {
    SQLiteDatabase db = helper.getWritableDatabase();
    db.beginTransaction();
    try {
        clearDatabase(db);
        db.setTransactionSuccessful();
        // NOTE(review): commit() blocks the calling thread — presumably acceptable here; confirm callers.
        getSharedPreferences().edit().remove(LAST_UPDATE_TIME_PREF).commit();
    } finally {
        db.endTransaction();
        context.getContentResolver().notifyChange(URI_TRACKS, null);
        context.getContentResolver().notifyChange(URI_EVENTS, null);
        LocalBroadcastManager.getInstance(context).sendBroadcast(new Intent(ACTION_SCHEDULE_REFRESHED));
    }
}
/**
 * Returns the distinct schedule dates for a track, each wrapped in a Day with a
 * sequential index.
 */
public ArrayList<Day> getDates(String track) {
    ArrayList<Day> days = new ArrayList<Day>();
    // Bind the track as a query argument instead of String.format-ing it into the SQL:
    // avoids SQL injection and breakage on track names containing quotes.
    Cursor cursor = helper.getReadableDatabase().rawQuery(
            "SELECT date FROM schedule WHERE track=? GROUP BY date", new String[]{track});
    try {
        int count = 0;
        if (cursor.moveToFirst()) {
            do {
                days.add(new Day(count, cursor.getString(0)));
                count++;
            } while (cursor.moveToNext());
        }
    } finally {
        cursor.close();
    }
    return days;
}
/**
 * Looks up a single event by its id, including its speaker names.
 *
 * @param id event primary key
 * @return the matching event (last row wins if several match), or null when absent
 */
public FossasiaEvent getEventById(int id) {
    FossasiaEvent event = null;
    // Parameterized query instead of string concatenation.
    Cursor cursor = helper.getReadableDatabase().rawQuery(
            "SELECT * FROM schedule WHERE id=?", new String[]{String.valueOf(id)});
    try {
        while (cursor.moveToNext()) {
            id = cursor.getInt(0);
            String title = cursor.getString(1);
            String subTitle = cursor.getString(2);
            String date = cursor.getString(3);
            String day = cursor.getString(4);
            String startTime = cursor.getString(5);
            String abstractText = cursor.getString(6);
            String description = cursor.getString(7);
            String venue = cursor.getString(8);
            String track = cursor.getString(9);
            String moderator = cursor.getString(10);
            ArrayList<String> speakers = new ArrayList<String>();
            Cursor cursorSpeaker = helper.getReadableDatabase().rawQuery(
                    "SELECT speaker FROM " + DatabaseHelper.TABLE_NAME_SPEAKER_EVENT_RELATION
                            + " WHERE event_id=?", new String[]{String.valueOf(id)});
            try {
                while (cursorSpeaker.moveToNext()) {
                    speakers.add(cursorSpeaker.getString(0));
                }
            } finally {
                // The original leaked this cursor on every iteration.
                cursorSpeaker.close();
            }
            // Start time is prefixed with the date here ("date startTime").
            event = new FossasiaEvent(id, title, subTitle, speakers, date, day,
                    date + " " + startTime, abstractText, description, venue, track, moderator);
        }
    } finally {
        cursor.close();
    }
    return event;
}
/**
 * Returns all events scheduled on the given date, each with its speaker names.
 *
 * @param selectDate date string to match; bound as a parameter (the original
 *                   concatenated it into the SQL, breaking on quotes)
 * @return events on that date, possibly empty
 */
public ArrayList<FossasiaEvent> getEventsByDate(String selectDate) {
    ArrayList<FossasiaEvent> fossasiaEventList = new ArrayList<FossasiaEvent>();
    Cursor cursor = helper.getReadableDatabase().rawQuery(
            "SELECT * FROM schedule WHERE date=?", new String[]{selectDate});
    try {
        while (cursor.moveToNext()) {
            int id = cursor.getInt(0);
            String title = cursor.getString(1);
            String subTitle = cursor.getString(2);
            String date = cursor.getString(3);
            String day = cursor.getString(4);
            String startTime = cursor.getString(5);
            String abstractText = cursor.getString(6);
            String description = cursor.getString(7);
            String venue = cursor.getString(8);
            String track = cursor.getString(9);
            String moderator = cursor.getString(10);
            ArrayList<String> speakers = new ArrayList<String>();
            Cursor cursorSpeaker = helper.getReadableDatabase().rawQuery(
                    "SELECT speaker FROM " + DatabaseHelper.TABLE_NAME_SPEAKER_EVENT_RELATION
                            + " WHERE event_id=?", new String[]{String.valueOf(id)});
            try {
                while (cursorSpeaker.moveToNext()) {
                    speakers.add(cursorSpeaker.getString(0));
                }
            } finally {
                // The original leaked this cursor on every row.
                cursorSpeaker.close();
            }
            // NOTE: unlike getEventById(), the start time is NOT prefixed with the date here.
            fossasiaEventList.add(new FossasiaEvent(id, title, subTitle, speakers, date, day,
                    startTime, abstractText, description, venue, track, moderator));
        }
    } finally {
        cursor.close();
    }
    return fossasiaEventList;
}
/**
 * Returns all events a given speaker participates in. First resolves the
 * speaker's event titles from the relation table, then loads each event row
 * by title.
 *
 * @param name speaker name; bound as a query parameter
 * @return the speaker's events, possibly empty
 */
public ArrayList<FossasiaEvent> getEventBySpeaker(String name) {
    ArrayList<String> events = new ArrayList<String>();
    Cursor cursorEvents = helper.getReadableDatabase().rawQuery(
            "SELECT event FROM " + DatabaseHelper.TABLE_NAME_SPEAKER_EVENT_RELATION
                    + " WHERE speaker=?", new String[]{name});
    try {
        while (cursorEvents.moveToNext()) {
            events.add(cursorEvents.getString(0));
        }
    } finally {
        cursorEvents.close();
    }
    ArrayList<FossasiaEvent> fossasiaEventList = new ArrayList<FossasiaEvent>();
    for (String event : events) {
        // Titles are matched after diacritics removal, as in the original.
        Cursor cursor = helper.getReadableDatabase().rawQuery(
                "SELECT * FROM schedule WHERE title=?",
                new String[]{StringUtils.removeDiacritics(event)});
        try {
            while (cursor.moveToNext()) {
                int id = cursor.getInt(0);
                String title = cursor.getString(1);
                String subTitle = cursor.getString(2);
                String date = cursor.getString(3);
                String day = cursor.getString(4);
                String startTime = cursor.getString(5);
                String abstractText = cursor.getString(6);
                String description = cursor.getString(7);
                String venue = cursor.getString(8);
                String track = cursor.getString(9);
                String moderator = cursor.getString(10);
                ArrayList<String> speakers = new ArrayList<String>();
                Cursor cursorSpeaker = helper.getReadableDatabase().rawQuery(
                        "SELECT speaker FROM " + DatabaseHelper.TABLE_NAME_SPEAKER_EVENT_RELATION
                                + " WHERE event_id=?", new String[]{String.valueOf(id)});
                try {
                    while (cursorSpeaker.moveToNext()) {
                        speakers.add(cursorSpeaker.getString(0));
                    }
                } finally {
                    // The original leaked this cursor on every row.
                    cursorSpeaker.close();
                }
                fossasiaEventList.add(new FossasiaEvent(id, title, subTitle, speakers, date, day,
                        date + " " + startTime, abstractText, description, venue, track, moderator));
            }
        } finally {
            cursor.close();
        }
    }
    return fossasiaEventList;
}
/**
 * Returns all events on a given date within a given track.
 *
 * @param selectDate date string to match
 * @param track      track name to match; both are bound as parameters (the
 *                   original concatenated them into the SQL string)
 * @return matching events, possibly empty
 */
public ArrayList<FossasiaEvent> getEventsByDateandTrack(String selectDate, String track) {
    ArrayList<FossasiaEvent> fossasiaEventList = new ArrayList<FossasiaEvent>();
    Cursor cursor = helper.getReadableDatabase().rawQuery(
            "SELECT * FROM schedule WHERE date=? AND track=?",
            new String[]{selectDate, track});
    try {
        while (cursor.moveToNext()) {
            int id = cursor.getInt(0);
            String title = cursor.getString(1);
            String subTitle = cursor.getString(2);
            String date = cursor.getString(3);
            String day = cursor.getString(4);
            String startTime = cursor.getString(5);
            String abstractText = cursor.getString(6);
            String description = cursor.getString(7);
            String venue = cursor.getString(8);
            // Preserves the original behavior of echoing the stored track value
            // back into the event rather than the parameter.
            track = cursor.getString(9);
            String moderator = cursor.getString(10);
            ArrayList<String> speakers = new ArrayList<String>();
            Cursor cursorSpeaker = helper.getReadableDatabase().rawQuery(
                    "SELECT speaker FROM " + DatabaseHelper.TABLE_NAME_SPEAKER_EVENT_RELATION
                            + " WHERE event_id=?", new String[]{String.valueOf(id)});
            try {
                while (cursorSpeaker.moveToNext()) {
                    speakers.add(cursorSpeaker.getString(0));
                }
            } finally {
                // The original leaked this cursor on every row.
                cursorSpeaker.close();
            }
            fossasiaEventList.add(new FossasiaEvent(id, title, subTitle, speakers, date, day,
                    startTime, abstractText, description, venue, track, moderator));
        }
    } finally {
        cursor.close();
    }
    return fossasiaEventList;
}
/**
 * Loads speakers from the speaker table, filtered by key-speaker status.
 *
 * @param fetchKeySpeaker true to return only key speakers (flag column == 1),
 *                        false to return only regular speakers (flag column == 0)
 */
public ArrayList<Speaker> getSpeakers(boolean fetchKeySpeaker) {
    ArrayList<Speaker> speakers = new ArrayList<Speaker>();
    Cursor cursor = helper.getReadableDatabase().query(DatabaseHelper.TABLE_NAME_KEY_SPEAKERS, null, null, null, null, null, null);
    while (cursor.moveToNext()) {
        int id = cursor.getInt(0);
        String name = cursor.getString(1);
        String designation = cursor.getString(2);
        String information = cursor.getString(3);
        String twitterHandle = cursor.getString(4);
        String linkedInUrl = cursor.getString(5);
        String profilePicUrl = cursor.getString(6);
        int isKeySpeaker = cursor.getInt(7);
        // Keep only rows whose flag matches the requested group; any other
        // flag value is skipped, as in the original two-branch version.
        boolean wanted = fetchKeySpeaker ? isKeySpeaker == 1 : isKeySpeaker == 0;
        if (wanted) {
            speakers.add(new Speaker(id, name, information, linkedInUrl, twitterHandle, designation, profilePicUrl, isKeySpeaker));
        }
    }
    cursor.close();
    return speakers;
}
/**
 * Returns a cursor over every row of the track table. The caller owns the
 * cursor and must close it.
 */
public Cursor getTracks() {
    return helper.getReadableDatabase().rawQuery(
            "SELECT * FROM " + DatabaseHelper.TABLE_NAME_TRACK, null);
}
/**
 * Loads every sponsor row (id, name, image, url) into a list.
 */
public ArrayList<Sponsor> getSponsors() {
    ArrayList<Sponsor> sponsors = new ArrayList<Sponsor>();
    Cursor cursor = helper.getReadableDatabase().query(DatabaseHelper.TABLE_NAME_SPONSOR, null, null, null, null, null, null);
    while (cursor.moveToNext()) {
        int id = cursor.getInt(0);
        String name = cursor.getString(1);
        String img = cursor.getString(2);
        String url = cursor.getString(3);
        sponsors.add(new Sponsor(id, name, img, url));
    }
    cursor.close();
    return sponsors;
}
/**
 * Returns the bookmarked events.
 *
 * @param minStartTime When strictly positive, only return the events starting after this time
 *                     (same unit as the stored {@code start_time} column).
 * @return A cursor over event rows (aliased {@code _id}); the caller must close it.
 */
public Cursor getBookmarks(long minStartTime) {
// Optional start-time filter, bound as a query parameter.
String whereCondition;
String[] selectionArgs;
if (minStartTime > 0L) {
whereCondition = " WHERE e.start_time > ?";
selectionArgs = new String[]{String.valueOf(minStartTime)};
} else {
whereCondition = "";
selectionArgs = null;
}
// One row per bookmarked event: persons are aggregated via GROUP_CONCAT, and
// the trailing literal 1 occupies the same column position where
// getSearchResults() selects b.event_id — presumably a "bookmarked" flag
// (NOTE(review): confirm against the cursor consumer).
Cursor cursor = helper
.getReadableDatabase()
.rawQuery(
"SELECT e.id AS _id, e.start_time, e.end_time, e.room_name, e.slug, et.title, et.subtitle, e.abstract, e.description, GROUP_CONCAT(p.name, ', '), e.day_index, d.date, t.name, t.type, 1"
+ " FROM "
+ DatabaseHelper.BOOKMARKS_TABLE_NAME
+ " b"
+ " JOIN "
+ DatabaseHelper.EVENTS_TABLE_NAME
+ " e ON b.event_id = e.id"
+ " JOIN "
+ DatabaseHelper.EVENTS_TITLES_TABLE_NAME
+ " et ON e.id = et.rowid"
+ " JOIN "
+ DatabaseHelper.DAYS_TABLE_NAME
+ " d ON e.day_index = d._index"
+ " JOIN "
+ DatabaseHelper.TRACKS_TABLE_NAME
+ " t ON e.track_id = t.id"
+ " LEFT JOIN "
+ DatabaseHelper.EVENTS_PERSONS_TABLE_NAME
+ " ep ON e.id = ep.event_id"
+ " LEFT JOIN "
+ DatabaseHelper.PERSONS_TABLE_NAME
+ " p ON ep.person_id = p.rowid" + whereCondition + " GROUP BY e.id" + " ORDER BY e.start_time ASC", selectionArgs);
// Re-query any loader backed by this cursor when the events data changes.
cursor.setNotificationUri(context.getContentResolver(), URI_EVENTS);
return cursor;
}
/**
 * Search through matching titles, subtitles, track names, person names. We need to use an union of 3 sub-queries because a "match" condition can not be
 * accompanied by other conditions in a "where" statement.
 *
 * @param query raw user search text; applied as an FTS prefix match on titles
 *              and person names, and as a substring LIKE on track names
 * @return A cursor to Events; the caller must close it
 */
public Cursor getSearchResults(String query) {
// FTS prefix query for the MATCH clauses; plain substring pattern for LIKE.
final String matchQuery = query + "*";
String[] selectionArgs = new String[]{matchQuery, "%" + query + "%", matchQuery};
// Same column shape as getBookmarks(); the final b.event_id column is NULL
// for non-bookmarked events thanks to the LEFT JOIN on the bookmarks table.
Cursor cursor = helper
.getReadableDatabase()
.rawQuery(
"SELECT e.id AS _id, e.start_time, e.end_time, e.room_name, e.slug, et.title, et.subtitle, e.abstract, e.description, GROUP_CONCAT(p.name, ', '), e.day_index, d.date, t.name, t.type, b.event_id"
+ " FROM "
+ DatabaseHelper.EVENTS_TABLE_NAME
+ " e"
+ " JOIN "
+ DatabaseHelper.EVENTS_TITLES_TABLE_NAME
+ " et ON e.id = et.rowid"
+ " JOIN "
+ DatabaseHelper.DAYS_TABLE_NAME
+ " d ON e.day_index = d._index"
+ " JOIN "
+ DatabaseHelper.TRACKS_TABLE_NAME
+ " t ON e.track_id = t.id"
+ " LEFT JOIN "
+ DatabaseHelper.EVENTS_PERSONS_TABLE_NAME
+ " ep ON e.id = ep.event_id"
+ " LEFT JOIN "
+ DatabaseHelper.PERSONS_TABLE_NAME
+ " p ON ep.person_id = p.rowid"
+ " LEFT JOIN "
+ DatabaseHelper.BOOKMARKS_TABLE_NAME
+ " b ON e.id = b.event_id"
// Union of three id sets: FTS title match, track-name LIKE, FTS person match.
+ " WHERE e.id IN ( "
+ "SELECT rowid"
+ " FROM "
+ DatabaseHelper.EVENTS_TITLES_TABLE_NAME
+ " WHERE "
+ DatabaseHelper.EVENTS_TITLES_TABLE_NAME
+ " MATCH ?"
+ " UNION "
+ "SELECT e.id"
+ " FROM "
+ DatabaseHelper.EVENTS_TABLE_NAME
+ " e"
+ " JOIN "
+ DatabaseHelper.TRACKS_TABLE_NAME
+ " t ON e.track_id = t.id"
+ " WHERE t.name LIKE ?"
+ " UNION "
+ "SELECT ep.event_id"
+ " FROM "
+ DatabaseHelper.EVENTS_PERSONS_TABLE_NAME
+ " ep"
+ " JOIN "
+ DatabaseHelper.PERSONS_TABLE_NAME
+ " p ON ep.person_id = p.rowid" + " WHERE p.name MATCH ?" + " )" + " GROUP BY e.id" + " ORDER BY e.start_time ASC",
selectionArgs);
// Re-query any loader backed by this cursor when the events data changes.
cursor.setNotificationUri(context.getContentResolver(), URI_EVENTS);
return cursor;
}
/**
 * Method called by SearchSuggestionProvider to return search results in the format expected by the search framework
 * (BaseColumns._ID plus SUGGEST_COLUMN_TEXT_1/TEXT_2/INTENT_DATA).
 *
 * @param query raw user search text
 * @param limit maximum number of suggestion rows to return
 */
public Cursor getSearchSuggestionResults(String query, int limit) {
// FTS prefix query for MATCH; substring pattern for the track-name LIKE.
final String matchQuery = query + "*";
String[] selectionArgs = new String[]{matchQuery, "%" + query + "%", matchQuery, String.valueOf(limit)};
// Query is similar to getSearchResults but returns different columns, does not join the Day table or the Bookmark table and limits the result set.
Cursor cursor = helper.getReadableDatabase().rawQuery(
"SELECT e.id AS " + BaseColumns._ID + ", et.title AS " + SearchManager.SUGGEST_COLUMN_TEXT_1
+ ", IFNULL(GROUP_CONCAT(p.name, ', '), '') || ' - ' || t.name AS " + SearchManager.SUGGEST_COLUMN_TEXT_2 + ", e.id AS "
+ SearchManager.SUGGEST_COLUMN_INTENT_DATA + " FROM " + DatabaseHelper.EVENTS_TABLE_NAME + " e" + " JOIN "
+ DatabaseHelper.EVENTS_TITLES_TABLE_NAME + " et ON e.id = et.rowid" + " JOIN " + DatabaseHelper.TRACKS_TABLE_NAME
+ " t ON e.track_id = t.id" + " LEFT JOIN " + DatabaseHelper.EVENTS_PERSONS_TABLE_NAME + " ep ON e.id = ep.event_id" + " LEFT JOIN "
+ DatabaseHelper.PERSONS_TABLE_NAME + " p ON ep.person_id = p.rowid" + " WHERE e.id IN ( " + "SELECT rowid" + " FROM "
+ DatabaseHelper.EVENTS_TITLES_TABLE_NAME + " WHERE " + DatabaseHelper.EVENTS_TITLES_TABLE_NAME + " MATCH ?" + " UNION "
+ "SELECT e.id" + " FROM " + DatabaseHelper.EVENTS_TABLE_NAME + " e" + " JOIN " + DatabaseHelper.TRACKS_TABLE_NAME
+ " t ON e.track_id = t.id" + " WHERE t.name LIKE ?" + " UNION " + "SELECT ep.event_id" + " FROM "
+ DatabaseHelper.EVENTS_PERSONS_TABLE_NAME + " ep" + " JOIN " + DatabaseHelper.PERSONS_TABLE_NAME + " p ON ep.person_id = p.rowid"
+ " WHERE p.name MATCH ?" + " )" + " GROUP BY e.id" + " ORDER BY e.start_time ASC LIMIT ?", selectionArgs);
return cursor;
}
/**
 * Returns all persons ordered alphabetically, case-insensitively.
 * The caller must close the cursor.
 */
public Cursor getPersons() {
    Cursor cursor = helper.getReadableDatabase().rawQuery(
            "SELECT rowid AS _id, name" + " FROM " + DatabaseHelper.PERSONS_TABLE_NAME + " ORDER BY name COLLATE NOCASE", null);
    // Re-query any attached loader when the events data changes.
    cursor.setNotificationUri(context.getContentResolver(), URI_EVENTS);
    return cursor;
}
/**
 * Tells whether the given event currently has a bookmark row.
 */
public boolean isBookmarked(FossasiaEvent event) {
    String[] args = {String.valueOf(event.getId())};
    return queryNumEntries(helper.getReadableDatabase(), DatabaseHelper.BOOKMARKS_TABLE_NAME, "event_id = ?", args) > 0L;
}
/**
 * Returns the map URL stored for the given track, or the empty string when
 * the track is unknown.
 *
 * @param track track name; bound as a query parameter (the original formatted
 *              it into the SQL string, breaking on quotes and allowing injection)
 */
public String getTrackMapUrl(String track) {
    Cursor cursor = helper.getReadableDatabase().rawQuery(
            "SELECT map FROM " + DatabaseHelper.TABLE_NAME_TRACK_VENUE + " WHERE track=?",
            new String[]{track});
    String map = "";
    try {
        if (cursor.moveToFirst()) {
            map = cursor.getString(0);
        }
    } finally {
        cursor.close();
    }
    return map;
}
/**
 * Inserts a bookmark row for the given event.
 *
 * @return true when a new bookmark was inserted; false when the insert failed
 *         (e.g. the bookmark already exists and the insert returns -1)
 */
public boolean addBookmark(FossasiaEvent event) {
boolean complete = false;
SQLiteDatabase db = helper.getWritableDatabase();
db.beginTransaction();
try {
ContentValues values = new ContentValues();
values.put("event_id", event.getId());
long result = db.insert(DatabaseHelper.BOOKMARKS_TABLE_NAME, null, values);
// If the bookmark is already present, insert() returns -1 and we bail out
// without marking the transaction successful.
if (result == -1L) {
return false;
}
db.setTransactionSuccessful();
complete = true;
return true;
} finally {
db.endTransaction();
// Observers and broadcast receivers are only notified after a committed insert.
if (complete) {
context.getContentResolver().notifyChange(URI_EVENTS, null);
Intent intent = new Intent(ACTION_ADD_BOOKMARK).putExtra(EXTRA_EVENT_ID, event.getId());
// TODO: For now commented this, must implement String to date converter.
// Date startTime = event.getStartTime();
// if (startTime != null) {
// intent.putExtra(EXTRA_EVENT_START_TIME, startTime.getTime());
// }
LocalBroadcastManager.getInstance(context).sendBroadcast(intent);
}
}
}
/**
 * Removes the bookmark for the given event, if any.
 */
public boolean removeBookmark(FossasiaEvent event) {
    long[] ids = {event.getId()};
    return removeBookmarks(ids);
}
/**
 * Removes the bookmark for the given event id, if any.
 */
public boolean removeBookmark(long eventId) {
    long[] ids = {eventId};
    return removeBookmarks(ids);
}
/**
 * Deletes the bookmark rows for the given event ids inside one transaction.
 *
 * @param eventIds must contain at least one id
 * @return true when at least one bookmark row was deleted
 * @throws IllegalArgumentException when {@code eventIds} is empty
 */
public boolean removeBookmarks(long[] eventIds) {
int length = eventIds.length;
if (length == 0) {
throw new IllegalArgumentException("At least one bookmark id to remove must be passed");
}
// The ids are joined into an IN (...) clause; they are longs, so no quoting issues.
String[] stringEventIds = new String[length];
for (int i = 0; i < length; ++i) {
stringEventIds[i] = String.valueOf(eventIds[i]);
}
boolean isComplete = false;
SQLiteDatabase db = helper.getWritableDatabase();
db.beginTransaction();
try {
String whereClause = "event_id IN (" + TextUtils.join(",", stringEventIds) + ")";
int count = db.delete(DatabaseHelper.BOOKMARKS_TABLE_NAME, whereClause, null);
// Nothing matched: report failure without committing.
if (count == 0) {
return false;
}
db.setTransactionSuccessful();
isComplete = true;
return true;
} finally {
db.endTransaction();
// Observers and broadcast receivers are only notified after a committed delete.
if (isComplete) {
context.getContentResolver().notifyChange(URI_EVENTS, null);
Intent intent = new Intent(ACTION_REMOVE_BOOKMARKS).putExtra(EXTRA_EVENT_IDS, eventIds);
LocalBroadcastManager.getInstance(context).sendBroadcast(intent);
}
}
}
/**
 * Returns the venue information for the given track, or null when unknown.
 *
 * @param track track name; bound as a query parameter (the original formatted
 *              it into the SQL string, breaking on quotes and allowing injection)
 */
public Venue getVenueFromTrack(String track) {
    Venue result = null;
    Cursor cursor = helper.getReadableDatabase().rawQuery(
            "SELECT * FROM " + DatabaseHelper.TABLE_NAME_VENUE + " WHERE track=?",
            new String[]{track});
    try {
        if (cursor.moveToFirst()) {
            // Columns: track, venue, map, room, link, address, how_to_reach
            String venue = cursor.getString(1);
            String map = cursor.getString(2);
            String room = cursor.getString(3);
            String link = cursor.getString(4);
            String address = cursor.getString(5);
            String howToReach = cursor.getString(6);
            result = new Venue(track, venue, map, room, link, address, howToReach);
        }
    } finally {
        // The original never closed this cursor, leaking it on every call.
        cursor.close();
    }
    return result;
}
}
| |
/*
* Copyright 2013 Jin Kwon <jinahya at gmail.com>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.jinahya.security;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.security.MessageDigest;
import java.util.Arrays;
import java.util.List;
/**
 * A utility class for {@link MessageDigest}s.
 *
 * @author Jin Kwon &lt;jinahya at gmail.com&gt;
 */
public class MessageDigests {

    /**
     * Digest algorithms that every implementation of the Java platform is
     * required to support. Note this list is fixed-size (backed by
     * {@link Arrays#asList(Object[])}).
     */
    public static final List<String> SUPPORTED_ALGORITHMS =
        Arrays.asList("MD5", "SHA-1", "SHA-256");

    /**
     * Updates the given {@code digest} with bytes read from the given input
     * stream and returns the final digest value.
     *
     * @param digest the digest to update
     * @param input the input stream to read bytes from
     * @param buffer the work buffer; must not be empty
     * @param length the maximum number of bytes to digest; any negative value
     * for all available bytes. Reaching end-of-stream before {@code length}
     * bytes simply ends the loop; no exception is thrown.
     *
     * @return the digest result
     *
     * @throws NullPointerException if {@code digest}, {@code input} or
     * {@code buffer} is {@code null}
     * @throws IllegalArgumentException if {@code buffer} is empty
     * @throws IOException if an I/O error occurs
     */
    public static byte[] digest(final MessageDigest digest,
                                final InputStream input, final byte[] buffer,
                                final long length)
        throws IOException {

        if (digest == null) {
            throw new NullPointerException("digest");
        }
        if (input == null) {
            throw new NullPointerException("input");
        }
        if (buffer == null) {
            throw new NullPointerException("buffer");
        }
        if (buffer.length == 0) {
            throw new IllegalArgumentException("buffer.length == 0");
        }

        long count = 0L;
        for (int read; length < 0L || count < length; count += read) {
            // Never read past the requested length when one was given.
            final int l = length < 0L ? buffer.length
                          : (int) Math.min(buffer.length, length - count);
            read = input.read(buffer, 0, l);
            if (read == -1) {
                break;
            }
            digest.update(buffer, 0, read);
        }

        return digest.digest();
    }

    /**
     * Updates the given {@code digest} with bytes read from the given input
     * file and returns the final digest value.
     *
     * @param digest the digest to update
     * @param input the input file to read bytes from
     * @param buffer the work buffer; must not be empty
     * @param length the maximum number of bytes to digest; any negative value
     * for all available bytes
     *
     * @return the digest result
     *
     * @throws NullPointerException if {@code input} is {@code null}
     * @throws IOException if an I/O error occurs
     *
     * @see #digest(MessageDigest, InputStream, byte[], long)
     */
    public static byte[] digest(final MessageDigest digest, final File input,
                                final byte[] buffer, final long length)
        throws IOException {

        if (input == null) {
            throw new NullPointerException("input");
        }

        final InputStream input_ = new FileInputStream(input);
        try {
            return digest(digest, input_, buffer, length);
        } finally {
            input_.close();
        }
    }

    /**
     * Updates the given {@code digest} with bytes read from the given input
     * channel and returns the final digest value.
     *
     * @param digest the digest to update
     * @param input the input channel to read bytes from
     * @param buffer the work buffer; must have a non-zero capacity
     * @param length the maximum number of bytes to digest; any negative value
     * for all available bytes
     *
     * @return the digest result
     *
     * @throws NullPointerException if {@code digest}, {@code input} or
     * {@code buffer} is {@code null}
     * @throws IllegalArgumentException if {@code buffer} has zero capacity
     * @throws IOException if an I/O error occurs
     */
    public static byte[] digest(final MessageDigest digest,
                                final ReadableByteChannel input,
                                final ByteBuffer buffer, final long length)
        throws IOException {

        if (digest == null) {
            throw new NullPointerException("digest");
        }
        if (input == null) {
            throw new NullPointerException("input");
        }
        if (buffer == null) {
            throw new NullPointerException("buffer");
        }
        if (buffer.capacity() == 0) {
            throw new IllegalArgumentException("buffer.capacity == 0");
        }

        long count = 0L;
        for (int read; length < 0L || count < length; count += read) {
            buffer.clear(); // position -> 0, limit -> capacity
            if (length >= 0L) {
                // Shrink the window so we never digest more than requested.
                final long r = length - count;
                if (r < buffer.capacity()) {
                    buffer.limit((int) r);
                }
            }
            read = input.read(buffer);
            if (read == -1) {
                break;
            }
            buffer.flip(); // limit -> position, position -> 0
            digest.update(buffer); // position -> limit
        }

        return digest.digest();
    }

    /**
     * Updates the given {@code digest} with bytes read from the given input
     * file (via a channel) and returns the final digest value.
     *
     * @param digest the digest to update
     * @param input the input file to read bytes from
     * @param buffer the work buffer; must have a non-zero capacity
     * @param length the maximum number of bytes to digest; any negative value
     * for all available bytes. Reaching end-of-file before {@code length}
     * bytes simply ends the loop; no exception is thrown.
     * (The original javadoc referenced a nonexistent {@code #ALL} constant and
     * claimed an IOException at premature EOF, which the code never throws.)
     *
     * @return the digest result
     *
     * @throws NullPointerException if {@code input} is {@code null}
     * @throws IOException if an I/O error occurs
     *
     * @see #digest(MessageDigest, ReadableByteChannel, ByteBuffer, long)
     */
    public static byte[] digest(final MessageDigest digest, final File input,
                                final ByteBuffer buffer, final long length)
        throws IOException {

        if (input == null) {
            throw new NullPointerException("input");
        }

        final ReadableByteChannel input_ =
            new FileInputStream(input).getChannel();
        try {
            return digest(digest, input_, buffer, length);
        } finally {
            input_.close();
        }
    }

    /**
     * Updates the given digest with bytes read from the given input stream and
     * writes the digest result to the given output stream.
     *
     * @param digest the digest to update
     * @param input the input stream to read bytes from
     * @param output the output stream the digest result is written to
     * @param buffer the work buffer; must not be empty
     * @param length the maximum number of bytes to digest; any negative value
     * for all available bytes
     *
     * @return the actual number of bytes digested
     *
     * @throws NullPointerException if any argument is {@code null}
     * @throws IllegalArgumentException if {@code buffer} is empty
     * @throws IOException if an I/O error occurs
     */
    public static long digest(final MessageDigest digest,
                              final InputStream input,
                              final OutputStream output, final byte[] buffer,
                              final long length)
        throws IOException {

        if (digest == null) {
            throw new NullPointerException("digest");
        }
        if (input == null) {
            throw new NullPointerException("input");
        }
        if (output == null) {
            throw new NullPointerException("output");
        }
        if (buffer == null) {
            throw new NullPointerException("buffer");
        }
        if (buffer.length == 0) {
            throw new IllegalArgumentException("buffer.length == 0");
        }

        long count = 0L;
        for (int read; length < 0L || count < length; count += read) {
            final int l = length < 0L ? buffer.length
                          : (int) Math.min(buffer.length, length - count);
            read = input.read(buffer, 0, l);
            if (read == -1) {
                break;
            }
            digest.update(buffer, 0, read);
        }

        output.write(digest.digest());

        return count;
    }

    /**
     * Updates the given digest with bytes read from the given input channel
     * and writes the digest result to the given output channel.
     *
     * @param digest the digest to update
     * @param input the input channel to read bytes from
     * @param output the output channel the digest result is written to
     * @param buffer the work buffer; must have a non-zero capacity
     * @param length the maximum number of bytes to digest; any negative value
     * for all available bytes
     *
     * @return the actual number of bytes digested
     *
     * @throws NullPointerException if any argument is {@code null}
     * @throws IllegalArgumentException if {@code buffer} has zero capacity
     * @throws IOException if an I/O error occurs
     */
    public static long digest(final MessageDigest digest,
                              final ReadableByteChannel input,
                              final WritableByteChannel output,
                              final ByteBuffer buffer, final long length)
        throws IOException {

        if (digest == null) {
            throw new NullPointerException("digest");
        }
        if (input == null) {
            throw new NullPointerException("input");
        }
        if (output == null) {
            throw new NullPointerException("output");
        }
        if (buffer == null) {
            throw new NullPointerException("buffer");
        }
        if (buffer.capacity() == 0) {
            throw new IllegalArgumentException("buffer.capacity == 0");
        }

        long count = 0L;
        for (int read; length < 0L || count < length; count += read) {
            buffer.clear(); // position -> 0, limit -> capacity
            if (length >= 0L) {
                final long r = length - count;
                if (r < buffer.capacity()) {
                    buffer.limit((int) r);
                }
            }
            read = input.read(buffer);
            if (read == -1) {
                break;
            }
            buffer.flip(); // limit -> position, position -> 0
            digest.update(buffer); // position -> limit
        }

        // Channels may accept fewer bytes per write; loop until drained.
        final ByteBuffer b = ByteBuffer.wrap(digest.digest());
        while (b.hasRemaining()) {
            output.write(b);
        }

        return count;
    }

    /**
     * Creates a new instance. Protected so the class can be subclassed for
     * namespace-style use while preventing direct instantiation.
     */
    protected MessageDigests() {
        super();
    }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.testFramework;
import com.intellij.ProjectTopics;
import com.intellij.codeInsight.completion.CompletionProgressIndicator;
import com.intellij.codeInsight.hint.HintManager;
import com.intellij.codeInsight.hint.HintManagerImpl;
import com.intellij.codeInsight.lookup.LookupManager;
import com.intellij.codeInspection.InspectionProfileEntry;
import com.intellij.codeInspection.LocalInspectionTool;
import com.intellij.codeInspection.ex.InspectionToolWrapper;
import com.intellij.ide.highlighter.ProjectFileType;
import com.intellij.ide.startup.StartupManagerEx;
import com.intellij.ide.startup.impl.StartupManagerImpl;
import com.intellij.idea.IdeaLogger;
import com.intellij.idea.IdeaTestApplication;
import com.intellij.mock.MockApplication;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.DataProvider;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ex.ApplicationEx;
import com.intellij.openapi.application.impl.ApplicationInfoImpl;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.command.impl.DocumentReferenceManagerImpl;
import com.intellij.openapi.command.impl.UndoManagerImpl;
import com.intellij.openapi.command.undo.DocumentReferenceManager;
import com.intellij.openapi.command.undo.UndoManager;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.impl.EditorFactoryImpl;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.impl.FileDocumentManagerImpl;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.fileTypes.impl.FileTypeManagerImpl;
import com.intellij.openapi.module.EmptyModuleType;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleType;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.ModuleListener;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.project.ex.ProjectManagerEx;
import com.intellij.openapi.project.impl.ProjectImpl;
import com.intellij.openapi.project.impl.ProjectManagerImpl;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.startup.StartupManager;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.EmptyRunnable;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.encoding.EncodingManager;
import com.intellij.openapi.vfs.encoding.EncodingManagerImpl;
import com.intellij.openapi.vfs.impl.VirtualFilePointerTracker;
import com.intellij.openapi.vfs.newvfs.persistent.PersistentFS;
import com.intellij.openapi.vfs.newvfs.persistent.PersistentFSImpl;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiFileFactory;
import com.intellij.psi.PsiManager;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.impl.DocumentCommitThread;
import com.intellij.psi.impl.PsiDocumentManagerImpl;
import com.intellij.psi.impl.PsiManagerImpl;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageManagerImpl;
import com.intellij.psi.templateLanguages.TemplateDataLanguageMappings;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.LocalTimeCounter;
import com.intellij.util.ReflectionUtil;
import com.intellij.util.ThrowableRunnable;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.indexing.UnindexedFilesUpdater;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.ref.GCUtil;
import com.intellij.util.ui.UIUtil;
import junit.framework.AssertionFailedError;
import junit.framework.TestCase;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.concurrent.DelayQueue;
import java.util.concurrent.Delayed;
import java.util.concurrent.TimeUnit;
/**
* @author yole
*/
public abstract class LightPlatformTestCase extends UsefulTestCase implements DataProvider {
// Prefix used to tag the shared light project's presentable name/path.
@NonNls private static final String LIGHT_PROJECT_MARK = "Light project: ";
// Shared test application instance, created once and reused across light tests.
private static IdeaTestApplication ourApplication;
@SuppressWarnings("FieldAccessedSynchronizedAndUnsynchronized")
// Shared light project reused between tests for speed (see getProject()).
protected static Project ourProject;
// Module and PSI manager belonging to the shared light project.
private static Module ourModule;
// Lazily cached in getPsiManager().
private static PsiManager ourPsiManager;
// Set when an assertion fired during the test body; consulted during teardown.
private static boolean ourAssertionsInTestDetected;
private static VirtualFile ourSourceRoot;
// The currently running test case, if any.
private static TestCase ourTestCase;
public static Thread ourTestThread;
// Descriptor that produced the current light project (initProject()).
private static LightProjectDescriptor ourProjectDescriptor;
// Leak trackers: SDKs, stray threads, and virtual-file pointers.
private static SdkLeakTracker myOldSdks;
private ThreadTracker myThreadTracker;
static {
PlatformTestUtil.registerProjectCleanup(LightPlatformTestCase::closeAndDeleteProject);
}
private VirtualFilePointerTracker myVirtualFilePointerTracker;
/**
 * Returns the shared light project used by tests, e.g. for project component
 * retrieval.
 */
public static Project getProject() {
    return ourProject;
}
/**
 * Returns the shared module used by tests, e.g. for module component
 * retrieval.
 */
public static Module getModule() {
    return ourModule;
}
/**
 * Shortcut for {@code PsiManager.getInstance(getProject())}; the instance is
 * cached lazily on first use.
 */
@NotNull
public static PsiManager getPsiManager() {
    PsiManager manager = ourPsiManager;
    if (manager == null) {
        manager = PsiManager.getInstance(ourProject);
        ourPsiManager = manager;
    }
    return manager;
}
/**
 * (Re)obtains the shared test application and remembers it in
 * {@link #ourApplication}.
 */
@NotNull
public static IdeaTestApplication initApplication() {
    final IdeaTestApplication application = IdeaTestApplication.getInstance();
    ourApplication = application;
    return application;
}
/**
 * Disposes the shared test application (inside a write action) and clears the
 * cached reference. No-op when no application was created.
 */
@TestOnly
public static void disposeApplication() {
    if (ourApplication == null) {
        return;
    }
    ApplicationManager.getApplication().runWriteAction(() -> Disposer.dispose(ourApplication));
    ourApplication = null;
}
/**
 * Returns the shared test application, or null when none was initialized.
 */
public static IdeaTestApplication getApplication() {
    return ourApplication;
}
/**
 * Prints TeamCity build-statistic lines for the test run: application/project
 * instance counts, cumulative GC time, and loaded class count.
 */
@SuppressWarnings("UseOfSystemOutOrSystemErr")
public static void reportTestExecutionStatistics() {
    System.out.println("----- TEST STATISTICS -----");
    UsefulTestCase.logSetupTeardownCosts();
    System.out.println(String.format("##teamcity[buildStatisticValue key='ideaTests.appInstancesCreated' value='%d']",
                                     MockApplication.INSTANCES_CREATED));
    System.out.println(String.format("##teamcity[buildStatisticValue key='ideaTests.projectInstancesCreated' value='%d']",
                                     ProjectManagerImpl.TEST_PROJECTS_CREATED));
    final long totalGcTime = ManagementFactory.getGarbageCollectorMXBeans().stream()
        .mapToLong(GarbageCollectorMXBean::getCollectionTime)
        .sum();
    System.out.println(String.format("##teamcity[buildStatisticValue key='ideaTests.gcTimeMs' value='%d']", totalGcTime));
    System.out.println(String.format("##teamcity[buildStatisticValue key='ideaTests.classesLoaded' value='%d']",
                                     ManagementFactory.getClassLoadingMXBean().getTotalLoadedClassCount()));
}
protected void resetAllFields() {
resetClassFields(getClass());
}
private void resetClassFields(@NotNull Class<?> aClass) {
try {
UsefulTestCase.clearDeclaredFields(this, aClass);
}
catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
if (aClass == LightPlatformTestCase.class) return;
resetClassFields(aClass.getSuperclass());
}
private static void cleanPersistedVFSContent() {
((PersistentFSImpl)PersistentFS.getInstance()).cleanPersistedContents();
}
private static void initProject(@NotNull final LightProjectDescriptor descriptor) throws Exception {
ourProjectDescriptor = descriptor;
if (ourProject != null) {
closeAndDeleteProject();
}
ApplicationManager.getApplication().runWriteAction(LightPlatformTestCase::cleanPersistedVFSContent);
final File projectFile = FileUtil.createTempFile(ProjectImpl.LIGHT_PROJECT_NAME, ProjectFileType.DOT_DEFAULT_EXTENSION);
LocalFileSystem.getInstance().refreshAndFindFileByIoFile(projectFile);
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
new Throwable(projectFile.getPath()).printStackTrace(new PrintStream(buffer));
ourProject = PlatformTestCase.createProject(projectFile, LIGHT_PROJECT_MARK + buffer);
ourPathToKeep = projectFile.getPath();
ourPsiManager = null;
ourProjectDescriptor.setUpProject(ourProject, new LightProjectDescriptor.SetupHandler() {
@Override
public void moduleCreated(@NotNull Module module) {
//noinspection AssignmentToStaticFieldFromInstanceMethod
ourModule = module;
}
@Override
public void sourceRootCreated(@NotNull VirtualFile sourceRoot) {
//noinspection AssignmentToStaticFieldFromInstanceMethod
ourSourceRoot = sourceRoot;
}
});
}
/**
* @return The only source root
*/
public static VirtualFile getSourceRoot() {
return ourSourceRoot;
}
@Override
protected void setUp() throws Exception {
EdtTestUtil.runInEdtAndWait(() -> {
super.setUp();
ApplicationInfoImpl.setInStressTest(isStressTest());
if (isPerformanceTest()) {
Timings.getStatistics();
}
initApplication();
ourApplication.setDataProvider(this);
LightProjectDescriptor descriptor = getProjectDescriptor();
doSetup(descriptor, configureLocalInspectionTools(), getTestRootDisposable());
InjectedLanguageManagerImpl.pushInjectors(getProject());
storeSettings();
myThreadTracker = new ThreadTracker();
ModuleRootManager.getInstance(ourModule).orderEntries().getAllLibrariesAndSdkClassesRoots();
myVirtualFilePointerTracker = new VirtualFilePointerTracker();
});
}
@NotNull
protected LightProjectDescriptor getProjectDescriptor() {
return new SimpleLightProjectDescriptor(getModuleType(), getProjectJDK());
}
public static void doSetup(@NotNull LightProjectDescriptor descriptor,
@NotNull LocalInspectionTool[] localInspectionTools,
@NotNull Disposable parentDisposable) throws Exception {
assertNull("Previous test " + ourTestCase + " hasn't called tearDown(). Probably overridden without super call.", ourTestCase);
IdeaLogger.ourErrorsOccurred = null;
ApplicationManager.getApplication().assertIsDispatchThread();
myOldSdks = new SdkLeakTracker();
boolean reusedProject = true;
if (ourProject == null || ourProjectDescriptor == null || !ourProjectDescriptor.equals(descriptor)) {
initProject(descriptor);
reusedProject = false;
}
ProjectManagerEx projectManagerEx = ProjectManagerEx.getInstanceEx();
try {
projectManagerEx.openTestProject(ourProject);
}
catch (Throwable e) {
ourProject = null;
throw e;
}
if (reusedProject) {
DumbService.getInstance(ourProject).queueTask(new UnindexedFilesUpdater(ourProject));
}
MessageBusConnection connection = ourProject.getMessageBus().connect(parentDisposable);
connection.subscribe(ProjectTopics.MODULES, new ModuleListener() {
@Override
public void moduleAdded(@NotNull Project project, @NotNull Module module) {
fail("Adding modules is not permitted in LightIdeaTestCase.");
}
});
clearUncommittedDocuments(getProject());
InspectionsKt.configureInspections(localInspectionTools, getProject(), parentDisposable);
assertFalse(getPsiManager().isDisposed());
Boolean passed = null;
try {
passed = StartupManagerEx.getInstanceEx(getProject()).startupActivityPassed();
}
catch (Exception ignored) {
}
assertTrue("open: " + getProject().isOpen() +
"; disposed:" + getProject().isDisposed() +
"; startup passed:" + passed +
"; all open projects: " + Arrays.asList(ProjectManager.getInstance().getOpenProjects()), getProject().isInitialized());
CodeStyleSettingsManager.getInstance(getProject()).setTemporarySettings(new CodeStyleSettings());
final FileDocumentManager manager = FileDocumentManager.getInstance();
if (manager instanceof FileDocumentManagerImpl) {
Document[] unsavedDocuments = manager.getUnsavedDocuments();
manager.saveAllDocuments();
ApplicationManager.getApplication().runWriteAction(((FileDocumentManagerImpl)manager)::dropAllUnsavedDocuments);
assertEmpty("There are unsaved documents", Arrays.asList(unsavedDocuments));
}
UIUtil.dispatchAllInvocationEvents(); // startup activities
((FileTypeManagerImpl)FileTypeManager.getInstance()).drainReDetectQueue();
}
protected void enableInspectionTools(@NotNull InspectionProfileEntry... tools) {
InspectionsKt.enableInspectionTools(getProject(), getTestRootDisposable(), tools);
}
protected void enableInspectionTool(@NotNull InspectionToolWrapper toolWrapper) {
InspectionsKt.enableInspectionTool(getProject(), toolWrapper, getTestRootDisposable());
}
protected void enableInspectionTool(@NotNull InspectionProfileEntry tool) {
InspectionsKt.enableInspectionTool(getProject(), tool, getTestRootDisposable());
}
@NotNull
protected LocalInspectionTool[] configureLocalInspectionTools() {
return LocalInspectionTool.EMPTY_ARRAY;
}
@SuppressWarnings("TearDownDoesntCallSuperTearDown")
@Override
protected void tearDown() throws Exception {
Project project = getProject();
// don't use method references here to make stack trace reading easier
//noinspection Convert2MethodRef
new RunAll(
() -> CodeStyleSettingsManager.getInstance(project).dropTemporarySettings(),
this::checkForSettingsDamage,
() -> doTearDown(project, ourApplication),
() -> checkEditorsReleased(),
() -> myOldSdks.checkForJdkTableLeaks(),
super::tearDown,
() -> myThreadTracker.checkLeak(),
() -> InjectedLanguageManagerImpl.checkInjectorsAreDisposed(project),
() -> myVirtualFilePointerTracker.assertPointersAreDisposed()
).run();
}
public static void doTearDown(@NotNull Project project, @NotNull IdeaTestApplication application) {
// don't use method references here to make stack trace reading easier
//noinspection Convert2MethodRef
new RunAll().
append(() -> ((FileTypeManagerImpl)FileTypeManager.getInstance()).drainReDetectQueue()).
append(() -> CodeStyleSettingsManager.getInstance(project).dropTemporarySettings()).
append(LightPlatformTestCase::checkJavaSwingTimersAreDisposed).
append(() -> UsefulTestCase.doPostponedFormatting(project)).
append(() -> LookupManager.getInstance(project).hideActiveLookup()).
append(() -> ((StartupManagerImpl)StartupManager.getInstance(project)).prepareForNextTest()).
append(() -> { if (ProjectManager.getInstance() == null) throw new AssertionError("Application components damaged"); }).
append(() -> WriteCommandAction.runWriteCommandAction(project, () -> {
if (ourSourceRoot != null) {
try {
for (VirtualFile child : ourSourceRoot.getChildren()) {
child.delete(LightPlatformTestCase.class);
}
}
catch (IOException e) {
//noinspection CallToPrintStackTrace
e.printStackTrace();
}
}
FileDocumentManager manager = FileDocumentManager.getInstance();
if (manager instanceof FileDocumentManagerImpl) {
((FileDocumentManagerImpl)manager).dropAllUnsavedDocuments();
}
})).
append(() -> assertFalse(PsiManager.getInstance(project).isDisposed())).
append(() -> {
EncodingManager encodingManager = EncodingManager.getInstance();
if (encodingManager instanceof EncodingManagerImpl) {
((EncodingManagerImpl)encodingManager).clearDocumentQueue();
}
if (!ourAssertionsInTestDetected) {
if (IdeaLogger.ourErrorsOccurred != null) {
throw IdeaLogger.ourErrorsOccurred;
}
}
}).
append(() -> clearUncommittedDocuments(project)).
append(() -> ((HintManagerImpl)HintManager.getInstance()).cleanup()).
append(() -> DocumentCommitThread.getInstance().clearQueue()).
append(() -> ((UndoManagerImpl)UndoManager.getGlobalInstance()).dropHistoryInTests()).
append(() -> ((UndoManagerImpl)UndoManager.getInstance(project)).dropHistoryInTests()).
append(() -> ((DocumentReferenceManagerImpl)DocumentReferenceManager.getInstance()).cleanupForNextTest()).
append(() -> TemplateDataLanguageMappings.getInstance(project).cleanupForNextTest()).
append(() -> ((PsiManagerImpl)PsiManager.getInstance(project)).cleanupForNextTest()).
append(() -> ProjectManagerEx.getInstanceEx().closeTestProject(project)).
append(() -> application.setDataProvider(null)).
append(() -> ourTestCase = null).
append(() -> CompletionProgressIndicator.cleanupForNextTest()).
append(() -> {
if (ourTestCount++ % 100 == 0) {
// some tests are written in Groovy, and running all of them may result in some 40M of memory wasted on bean infos
// so let's clear the cache every now and then to ensure it doesn't grow too large
GCUtil.clearBeanInfoCache();
}
}).
run();
}
private static int ourTestCount;
private static void checkJavaSwingTimersAreDisposed() throws Exception {
Class<?> TimerQueueClass = Class.forName("javax.swing.TimerQueue");
Method sharedInstance = ReflectionUtil.getMethod(TimerQueueClass, "sharedInstance");
Object timerQueue = sharedInstance.invoke(null);
DelayQueue delayQueue = ReflectionUtil.getField(TimerQueueClass, timerQueue, DelayQueue.class, "queue");
Delayed timer = delayQueue.peek();
if (timer != null) {
long delay = timer.getDelay(TimeUnit.MILLISECONDS);
String text = "(delayed for " + delay + "ms)";
Method getTimer = ReflectionUtil.getDeclaredMethod(timer.getClass(), "getTimer");
Timer swingTimer = (Timer)getTimer.invoke(timer);
text = "Timer (listeners: "+Arrays.asList(swingTimer.getActionListeners()) + ") "+text;
throw new AssertionFailedError("Not disposed java.swing.Timer: " + text + "; queue:" + timerQueue);
}
}
public static void clearUncommittedDocuments(@NotNull Project project) {
PsiDocumentManagerImpl documentManager = (PsiDocumentManagerImpl)PsiDocumentManager.getInstance(project);
documentManager.clearUncommittedDocuments();
ProjectManagerImpl projectManager = (ProjectManagerImpl)ProjectManager.getInstance();
if (projectManager.isDefaultProjectInitialized()) {
Project defaultProject = projectManager.getDefaultProject();
((PsiDocumentManagerImpl)PsiDocumentManager.getInstance(defaultProject)).clearUncommittedDocuments();
}
}
public static void checkEditorsReleased() {
// don't use method references here to make stack trace reading easier
//noinspection Convert2MethodRef
new RunAll(
() -> UIUtil.dispatchAllInvocationEvents(),
() -> {
RunAll runAll = new RunAll();
for (Editor editor : EditorFactory.getInstance().getAllEditors()) {
runAll = runAll
.append(() -> EditorFactoryImpl.throwNotReleasedError(editor))
.append(() -> EditorFactory.getInstance().releaseEditor(editor));
}
runAll.run();
}).run();
}
@Override
public final void runBare() throws Throwable {
runBareImpl(this::startRunAndTear);
}
@SuppressWarnings("AssignmentToStaticFieldFromInstanceMethod")
protected void runBareImpl(ThrowableRunnable<?> start) throws Exception {
if (!shouldRunTest()) {
return;
}
TestRunnerUtil.replaceIdeEventQueueSafely();
EdtTestUtil.runInEdtAndWait(() -> {
try {
ourTestThread = Thread.currentThread();
start.run();
}
finally {
ourTestThread = null;
try {
Application application = ApplicationManager.getApplication();
if (application instanceof ApplicationEx) {
PlatformTestCase.cleanupApplicationCaches(ourProject);
}
resetAllFields();
}
catch (Throwable e) {
//noinspection CallToPrintStackTrace
e.printStackTrace();
}
}
});
// just to make sure all deferred Runnables to finish
SwingUtilities.invokeAndWait(EmptyRunnable.getInstance());
if (IdeaLogger.ourErrorsOccurred != null) {
throw IdeaLogger.ourErrorsOccurred;
}
}
@SuppressWarnings("AssignmentToStaticFieldFromInstanceMethod")
private void startRunAndTear() throws Throwable {
setUp();
try {
ourAssertionsInTestDetected = true;
runTest();
ourAssertionsInTestDetected = false;
}
finally {
//try{
tearDown();
//}
//catch(Throwable th){
// noinspection CallToPrintStackTrace
//th.printStackTrace();
//}
}
}
@Override
public Object getData(String dataId) {
return ourProject == null || ourProject.isDisposed() ? null : new TestDataProvider(ourProject).getData(dataId);
}
protected Sdk getProjectJDK() {
return null;
}
@NotNull
protected ModuleType getModuleType() {
return EmptyModuleType.getInstance();
}
/**
* Creates dummy source file. One is not placed under source root so some PSI functions like resolve to external classes
* may not work. Though it works significantly faster and yet can be used if you need to create some PSI structures for
* test purposes
*
* @param fileName - name of the file to create. Extension is used to choose what PSI should be created like java, jsp, aj, xml etc.
* @param text - file text.
* @return dummy psi file.
*
*/
@NotNull
protected static PsiFile createFile(@NonNls @NotNull String fileName, @NonNls @NotNull String text) throws IncorrectOperationException {
FileType fileType = FileTypeManager.getInstance().getFileTypeByFileName(fileName);
return PsiFileFactory.getInstance(getProject())
.createFileFromText(fileName, fileType, text, LocalTimeCounter.currentTime(), true, false);
}
@NotNull
protected static PsiFile createLightFile(@NonNls @NotNull String fileName, @NotNull String text) throws IncorrectOperationException {
FileType fileType = FileTypeManager.getInstance().getFileTypeByFileName(fileName);
return PsiFileFactory.getInstance(getProject())
.createFileFromText(fileName, fileType, text, LocalTimeCounter.currentTime(), false, false);
}
/**
* Convenient conversion of testSomeTest -> someTest | SomeTest where testSomeTest is the name of current test.
*
* @param lowercaseFirstLetter - whether first letter after test should be lowercased.
*/
@NotNull
@Override
protected String getTestName(boolean lowercaseFirstLetter) {
String name = getName();
assertTrue("Test name should start with 'test': " + name, name.startsWith("test"));
name = name.substring("test".length());
if (!name.isEmpty() && lowercaseFirstLetter && !PlatformTestUtil.isAllUppercaseName(name)) {
name = Character.toLowerCase(name.charAt(0)) + name.substring(1);
}
return name;
}
protected static void commitDocument(@NotNull Document document) {
PsiDocumentManager.getInstance(getProject()).commitDocument(document);
}
protected static void commitAllDocuments() {
PsiDocumentManager.getInstance(getProject()).commitAllDocuments();
}
@NotNull
@Override
protected CodeStyleSettings getCurrentCodeStyleSettings() {
return CodeStyleSettingsManager.getSettings(getProject());
}
protected static Document getDocument(@NotNull PsiFile file) {
return PsiDocumentManager.getInstance(getProject()).getDocument(file);
}
@SuppressWarnings("NonPrivateFieldAccessedInSynchronizedContext")
public static synchronized void closeAndDeleteProject() {
if (ourProject == null) {
return;
}
if (ApplicationManager.getApplication().isWriteAccessAllowed()) {
throw new IllegalStateException("Must not call closeAndDeleteProject from under write action");
}
if (!ourProject.isDisposed()) {
assertEquals(ourProject, ourModule.getProject());
@SuppressWarnings("ConstantConditions")
File ioFile = new File(ourProject.getProjectFilePath());
if (ioFile.exists()) {
File dir = ioFile.getParentFile();
if (dir.getName().startsWith(UsefulTestCase.TEMP_DIR_MARKER)) {
FileUtil.delete(dir);
}
else {
FileUtil.delete(ioFile);
}
}
}
assertTrue(ProjectManagerEx.getInstanceEx().closeAndDispose(ourProject));
assertTrue(ourProject.isDisposed());
// project may be disposed but empty folder may still be there
if (ourPathToKeep != null) {
File parent = new File(ourPathToKeep).getParentFile();
if (parent.getName().startsWith(UsefulTestCase.TEMP_DIR_MARKER)) {
// delete only empty folders
//noinspection ResultOfMethodCallIgnored
parent.delete();
}
}
ourProject = null;
assertTrue(ourModule.isDisposed());
ourModule = null;
if (ourPsiManager != null) {
assertTrue(ourPsiManager.isDisposed());
ourPsiManager = null;
}
ourPathToKeep = null;
}
private static class SimpleLightProjectDescriptor extends LightProjectDescriptor {
@NotNull private final ModuleType myModuleType;
@Nullable private final Sdk mySdk;
SimpleLightProjectDescriptor(@NotNull ModuleType moduleType, @Nullable Sdk sdk) {
myModuleType = moduleType;
mySdk = sdk;
}
@NotNull
@Override
public ModuleType getModuleType() {
return myModuleType;
}
@Nullable
@Override
public Sdk getSdk() {
return mySdk;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
SimpleLightProjectDescriptor that = (SimpleLightProjectDescriptor)o;
if (!myModuleType.equals(that.myModuleType)) return false;
return areJdksEqual(that.getSdk());
}
@Override
public int hashCode() {
return myModuleType.hashCode();
}
private boolean areJdksEqual(final Sdk newSdk) {
if (mySdk == null || newSdk == null) return mySdk == newSdk;
final String[] myUrls = mySdk.getRootProvider().getUrls(OrderRootType.CLASSES);
final String[] newUrls = newSdk.getRootProvider().getUrls(OrderRootType.CLASSES);
return ContainerUtil.newHashSet(myUrls).equals(ContainerUtil.newHashSet(newUrls));
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.client.solrj.impl;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.zip.GZIPInputStream;
import java.util.zip.InflaterInputStream;
import org.apache.http.Header;
import org.apache.http.HeaderElement;
import org.apache.http.HttpEntity;
import org.apache.http.HttpException;
import org.apache.http.HttpRequest;
import org.apache.http.HttpRequestInterceptor;
import org.apache.http.HttpResponse;
import org.apache.http.HttpResponseInterceptor;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.HttpClient;
import org.apache.http.client.params.ClientParamBean;
import org.apache.http.conn.ClientConnectionManager;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.conn.ssl.SSLSocketFactory;
import org.apache.http.conn.ssl.X509HostnameVerifier;
import org.apache.http.entity.HttpEntityWrapper;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.client.DefaultHttpRequestRetryHandler;
import org.apache.http.impl.client.SystemDefaultHttpClient;
import org.apache.http.impl.conn.PoolingClientConnectionManager;
import org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager; // jdoc
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.protocol.HttpContext;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Utility class for creating/configuring httpclient instances.
 */
public class HttpClientUtil {
  // socket timeout measured in ms, closes a socket if read
  // takes longer than x ms to complete. throws
  // java.net.SocketTimeoutException: Read timed out exception
  public static final String PROP_SO_TIMEOUT = "socketTimeout";
  // connection timeout measures in ms, closes a socket if connection
  // cannot be established within x ms. with a
  // java.net.SocketTimeoutException: Connection timed out
  public static final String PROP_CONNECTION_TIMEOUT = "connTimeout";
  // Maximum connections allowed per host
  public static final String PROP_MAX_CONNECTIONS_PER_HOST = "maxConnectionsPerHost";
  // Maximum total connections allowed
  public static final String PROP_MAX_CONNECTIONS = "maxConnections";
  // Retry http requests on error
  public static final String PROP_USE_RETRY = "retry";
  // Allow compression (deflate,gzip) if server supports it
  public static final String PROP_ALLOW_COMPRESSION = "allowCompression";
  // Follow redirects
  public static final String PROP_FOLLOW_REDIRECTS = "followRedirects";
  // Basic auth username
  public static final String PROP_BASIC_AUTH_USER = "httpBasicAuthUser";
  // Basic auth password
  public static final String PROP_BASIC_AUTH_PASS = "httpBasicAuthPassword";

  public static final String SYS_PROP_CHECK_PEER_NAME = "solr.ssl.checkPeerName";

  private static final Logger logger = LoggerFactory
      .getLogger(HttpClientUtil.class);

  // Retry handler that never retries; installed by setUseRetry(httpClient, false).
  static final DefaultHttpRequestRetryHandler NO_RETRY = new DefaultHttpRequestRetryHandler(
      0, false);

  private static HttpClientConfigurer configurer = new HttpClientConfigurer();

  // Interceptors added to every client created through configureClient().
  private static final List<HttpRequestInterceptor> interceptors = Collections.synchronizedList(new ArrayList<HttpRequestInterceptor>());

  /**
   * Replace the {@link HttpClientConfigurer} class used in configuring the http
   * clients with a custom implementation.
   */
  public static void setConfigurer(HttpClientConfigurer newConfigurer) {
    configurer = newConfigurer;
  }

  public static HttpClientConfigurer getConfigurer() {
    return configurer;
  }

  /**
   * Creates new http client by using the provided configuration.
   *
   * @param params
   *          http client configuration, if null a client with default
   *          configuration (no additional configuration) is created.
   */
  public static CloseableHttpClient createClient(final SolrParams params) {
    final ModifiableSolrParams config = new ModifiableSolrParams(params);
    if (logger.isDebugEnabled()) {
      logger.debug("Creating new http client, config:" + config);
    }
    final DefaultHttpClient httpClient = HttpClientFactory.createHttpClient();
    configureClient(httpClient, config);
    return httpClient;
  }

  /**
   * Creates new http client by using the provided configuration.
   *
   * @param params http client configuration
   * @param cm     connection manager to back the client
   */
  public static CloseableHttpClient createClient(final SolrParams params, ClientConnectionManager cm) {
    final ModifiableSolrParams config = new ModifiableSolrParams(params);
    if (logger.isDebugEnabled()) {
      logger.debug("Creating new http client, config:" + config);
    }
    final DefaultHttpClient httpClient = HttpClientFactory.createHttpClient(cm);
    configureClient(httpClient, config);
    return httpClient;
  }

  /**
   * Configures {@link DefaultHttpClient}, only sets parameters if they are
   * present in config. Also registers all globally added request interceptors.
   */
  public static void configureClient(final DefaultHttpClient httpClient,
      SolrParams config) {
    configurer.configure(httpClient, config);
    // Iterating a synchronizedList requires external synchronization.
    synchronized (interceptors) {
      for (HttpRequestInterceptor interceptor : interceptors) {
        httpClient.addRequestInterceptor(interceptor);
      }
    }
  }

  /** Closes the client, releasing its connection-manager resources. */
  public static void close(HttpClient httpClient) {
    if (httpClient instanceof CloseableHttpClient) {
      org.apache.solr.common.util.IOUtils.closeQuietly((CloseableHttpClient) httpClient);
    } else {
      httpClient.getConnectionManager().shutdown();
    }
  }

  /** Registers an interceptor applied to all clients configured after this call. */
  public static void addRequestInterceptor(HttpRequestInterceptor interceptor) {
    interceptors.add(interceptor);
  }

  public static void removeRequestInterceptor(HttpRequestInterceptor interceptor) {
    interceptors.remove(interceptor);
  }

  /**
   * Control HTTP payload compression.
   *
   * @param allowCompression
   *          true will enable compression (needs support from server), false
   *          will disable compression.
   */
  public static void setAllowCompression(DefaultHttpClient httpClient,
      boolean allowCompression) {
    // Remove any previously installed compression interceptors so the call is idempotent.
    httpClient
        .removeRequestInterceptorByClass(UseCompressionRequestInterceptor.class);
    httpClient
        .removeResponseInterceptorByClass(UseCompressionResponseInterceptor.class);
    if (allowCompression) {
      httpClient.addRequestInterceptor(new UseCompressionRequestInterceptor());
      httpClient
          .addResponseInterceptor(new UseCompressionResponseInterceptor());
    }
  }

  /**
   * Set http basic auth information. If basicAuthUser or basicAuthPass is null
   * the basic auth configuration is cleared. Currently this is not preemptive
   * authentication. So it is not currently possible to do a post request while
   * using this setting.
   */
  public static void setBasicAuth(DefaultHttpClient httpClient,
      String basicAuthUser, String basicAuthPass) {
    if (basicAuthUser != null && basicAuthPass != null) {
      httpClient.getCredentialsProvider().setCredentials(AuthScope.ANY,
          new UsernamePasswordCredentials(basicAuthUser, basicAuthPass));
    } else {
      httpClient.getCredentialsProvider().clear();
    }
  }

  /**
   * Set max connections allowed per host. This call will only work when
   * {@link ThreadSafeClientConnManager} or
   * {@link PoolingClientConnectionManager} is used.
   */
  public static void setMaxConnectionsPerHost(HttpClient httpClient,
      int max) {
    // would have been nice if there was a common interface
    if (httpClient.getConnectionManager() instanceof ThreadSafeClientConnManager) {
      ThreadSafeClientConnManager mgr = (ThreadSafeClientConnManager) httpClient.getConnectionManager();
      mgr.setDefaultMaxPerRoute(max);
    } else if (httpClient.getConnectionManager() instanceof PoolingClientConnectionManager) {
      PoolingClientConnectionManager mgr = (PoolingClientConnectionManager) httpClient.getConnectionManager();
      mgr.setDefaultMaxPerRoute(max);
    }
  }

  /**
   * Set max total connections allowed. This call will only work when
   * {@link ThreadSafeClientConnManager} or
   * {@link PoolingClientConnectionManager} is used.
   */
  public static void setMaxConnections(final HttpClient httpClient,
      int max) {
    // would have been nice if there was a common interface
    if (httpClient.getConnectionManager() instanceof ThreadSafeClientConnManager) {
      ThreadSafeClientConnManager mgr = (ThreadSafeClientConnManager) httpClient.getConnectionManager();
      mgr.setMaxTotal(max);
    } else if (httpClient.getConnectionManager() instanceof PoolingClientConnectionManager) {
      PoolingClientConnectionManager mgr = (PoolingClientConnectionManager) httpClient.getConnectionManager();
      mgr.setMaxTotal(max);
    }
  }

  /**
   * Defines the socket timeout (SO_TIMEOUT) in milliseconds. A timeout value of
   * zero is interpreted as an infinite timeout.
   *
   * @param timeout timeout in milliseconds
   */
  public static void setSoTimeout(HttpClient httpClient, int timeout) {
    HttpConnectionParams.setSoTimeout(httpClient.getParams(),
        timeout);
  }

  /**
   * Control retry handler
   * @param useRetry when false the client will not try to retry failed requests.
   */
  public static void setUseRetry(final DefaultHttpClient httpClient,
      boolean useRetry) {
    if (!useRetry) {
      httpClient.setHttpRequestRetryHandler(NO_RETRY);
    } else {
      // if the request is not fully sent, we retry
      // streaming updates are not a problem, because they are not retryable
      httpClient.setHttpRequestRetryHandler(new DefaultHttpRequestRetryHandler() {
        @Override
        protected boolean handleAsIdempotent(final HttpRequest request) {
          return false; // we can't tell if a Solr request is idempotent
        }
      });
    }
  }

  /**
   * Set connection timeout. A timeout value of zero is interpreted as an
   * infinite timeout.
   *
   * @param timeout
   *          connection Timeout in milliseconds
   */
  public static void setConnectionTimeout(final HttpClient httpClient,
      int timeout) {
    HttpConnectionParams.setConnectionTimeout(httpClient.getParams(),
        timeout);
  }

  /**
   * Set follow redirects.
   *
   * @param followRedirects When true the client will follow redirects.
   */
  public static void setFollowRedirects(HttpClient httpClient,
      boolean followRedirects) {
    new ClientParamBean(httpClient.getParams()).setHandleRedirects(followRedirects);
  }

  /** Installs the given verifier on the client's https scheme socket factory, if https is registered. */
  public static void setHostNameVerifier(DefaultHttpClient httpClient,
      X509HostnameVerifier hostNameVerifier) {
    Scheme httpsScheme = httpClient.getConnectionManager().getSchemeRegistry().get("https");
    if (httpsScheme != null) {
      SSLSocketFactory sslSocketFactory = (SSLSocketFactory) httpsScheme.getSchemeSocketFactory();
      sslSocketFactory.setHostnameVerifier(hostNameVerifier);
    }
  }

  public static void setStaleCheckingEnabled(final HttpClient httpClient, boolean enabled) {
    HttpConnectionParams.setStaleCheckingEnabled(httpClient.getParams(), enabled);
  }

  public static void setTcpNoDelay(final HttpClient httpClient, boolean tcpNoDelay) {
    HttpConnectionParams.setTcpNoDelay(httpClient.getParams(), tcpNoDelay);
  }

  /** Advertises gzip/deflate support on outgoing requests that don't already set Accept-Encoding. */
  private static class UseCompressionRequestInterceptor implements
      HttpRequestInterceptor {

    @Override
    public void process(HttpRequest request, HttpContext context)
        throws HttpException, IOException {
      if (!request.containsHeader("Accept-Encoding")) {
        request.addHeader("Accept-Encoding", "gzip, deflate");
      }
    }
  }

  /** Transparently decompresses gzip/deflate-encoded response bodies. */
  private static class UseCompressionResponseInterceptor implements
      HttpResponseInterceptor {

    @Override
    public void process(final HttpResponse response, final HttpContext context)
        throws HttpException, IOException {
      HttpEntity entity = response.getEntity();
      // Responses without a body (e.g. HEAD, 204, 304) have no entity;
      // HttpResponse.getEntity() returns null then — guard against NPE.
      if (entity == null) {
        return;
      }
      Header ceheader = entity.getContentEncoding();
      if (ceheader != null) {
        // Wrap the entity for the first recognized codec and stop.
        for (HeaderElement codec : ceheader.getElements()) {
          if (codec.getName().equalsIgnoreCase("gzip")) {
            response
                .setEntity(new GzipDecompressingEntity(response.getEntity()));
            return;
          }
          if (codec.getName().equalsIgnoreCase("deflate")) {
            response.setEntity(new DeflateDecompressingEntity(response
                .getEntity()));
            return;
          }
        }
      }
    }
  }

  /** Entity wrapper that gunzips the underlying content stream on read. */
  private static class GzipDecompressingEntity extends HttpEntityWrapper {
    public GzipDecompressingEntity(final HttpEntity entity) {
      super(entity);
    }

    @Override
    public InputStream getContent() throws IOException, IllegalStateException {
      return new GZIPInputStream(wrappedEntity.getContent());
    }

    @Override
    public long getContentLength() {
      // Decompressed length is unknown in advance.
      return -1;
    }
  }

  /** Entity wrapper that inflates (deflate-encoded) content on read. */
  private static class DeflateDecompressingEntity extends
      GzipDecompressingEntity {
    public DeflateDecompressingEntity(final HttpEntity entity) {
      super(entity);
    }

    @Override
    public InputStream getContent() throws IOException, IllegalStateException {
      return new InflaterInputStream(wrappedEntity.getContent());
    }
  }

  /**
   * Reflection-based factory for client instances, allowing the concrete
   * {@link DefaultHttpClient}/{@link SystemDefaultHttpClient} classes to be swapped
   * via {@link #setHttpClientImpl}.
   */
  public static class HttpClientFactory {
    private static Class<? extends DefaultHttpClient> defaultHttpClientClass = DefaultHttpClient.class;
    private static Class<? extends SystemDefaultHttpClient> systemDefaultHttpClientClass = SystemDefaultHttpClient.class;

    public static SystemDefaultHttpClient createHttpClient() {
      Constructor<? extends SystemDefaultHttpClient> constructor;
      try {
        constructor = systemDefaultHttpClientClass.getDeclaredConstructor();
        return constructor.newInstance();
      } catch (NoSuchMethodException | SecurityException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
        throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to create HttpClient instance. ", e);
      }
    }

    public static DefaultHttpClient createHttpClient(ClientConnectionManager cm) {
      Constructor<? extends DefaultHttpClient> constructor;
      try {
        constructor = defaultHttpClientClass.getDeclaredConstructor(new Class[]{ClientConnectionManager.class});
        return constructor.newInstance(new Object[]{cm});
      } catch (NoSuchMethodException | SecurityException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
        throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to create HttpClient instance, registered class is: " + defaultHttpClientClass, e);
      }
    }

    public static void setHttpClientImpl(Class<? extends DefaultHttpClient> defaultHttpClient, Class<? extends SystemDefaultHttpClient> systemDefaultHttpClient) {
      defaultHttpClientClass = defaultHttpClient;
      systemDefaultHttpClientClass = systemDefaultHttpClient;
    }
  }
}
| |
package com.mikepenz.materialdrawer.model;
import android.content.Context;
import android.graphics.drawable.Drawable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import com.mikepenz.iconics.IconicsDrawable;
import com.mikepenz.iconics.typeface.IIcon;
import com.mikepenz.materialdrawer.R;
import com.mikepenz.materialdrawer.model.interfaces.IDrawerItem;
import com.mikepenz.materialdrawer.model.interfaces.IProfile;
import com.mikepenz.materialdrawer.model.interfaces.Identifyable;
import com.mikepenz.materialdrawer.model.interfaces.Tagable;
import com.mikepenz.materialdrawer.util.UIUtils;
/**
* Created by mikepenz on 03.02.15.
*/
/**
 * A drawer item representing a profile-related setting row (e.g. "Add
 * account") inside the account switcher.
 * <p>
 * NOTE: this reuses the {@link IProfile} interface so custom items can be
 * shown within the AccountSwitcher; the {@code email} field doubles as a
 * generic description ({@link #withDescription(String)} is an alias for
 * {@link #withEmail(String)}).
 * <p>
 * Created by mikepenz on 03.02.15.
 */
public class ProfileSettingDrawerItem implements IDrawerItem, IProfile<ProfileSettingDrawerItem>, Tagable<ProfileSettingDrawerItem>, Identifyable<ProfileSettingDrawerItem> {
    // -1 means "no identifier set".
    private int identifier = -1;
    // Settings rows are not selectable by default.
    private boolean selectable = false;
    private Drawable icon;
    private IIcon iicon;
    private String name;
    // Also serves as the generic "description" of the item.
    private String email;
    private boolean enabled = true;
    private Object tag;
    // Explicit color values (0 = unset) take precedence over the *_Res
    // resource ids (-1 = unset); theme defaults are used when both are unset.
    private int selectedColor = 0;
    private int selectedColorRes = -1;
    private int textColor = 0;
    private int textColorRes = -1;

    public ProfileSettingDrawerItem withIdentifier(int identifier) {
        this.identifier = identifier;
        return this;
    }

    public ProfileSettingDrawerItem withIcon(Drawable icon) {
        this.icon = icon;
        return this;
    }

    public ProfileSettingDrawerItem withIcon(IIcon iicon) {
        this.iicon = iicon;
        return this;
    }

    public ProfileSettingDrawerItem withName(String name) {
        this.name = name;
        return this;
    }

    /** Alias for {@link #withEmail(String)}: stores the text in the email field. */
    public ProfileSettingDrawerItem withDescription(String description) {
        this.email = description;
        return this;
    }

    //NOTE we reuse the IProfile here to allow custom items within the AccountSwitcher. There is an alias method withDescription for this
    public ProfileSettingDrawerItem withEmail(String email) {
        this.email = email;
        return this;
    }

    public ProfileSettingDrawerItem withTag(Object object) {
        this.tag = object;
        return this;
    }

    public ProfileSettingDrawerItem setEnabled(boolean enabled) {
        this.enabled = enabled;
        return this;
    }

    public ProfileSettingDrawerItem withSelectedColor(int selectedColor) {
        this.selectedColor = selectedColor;
        return this;
    }

    public ProfileSettingDrawerItem withSelectedColorRes(int selectedColorRes) {
        this.selectedColorRes = selectedColorRes;
        return this;
    }

    public ProfileSettingDrawerItem withTextColor(int textColor) {
        this.textColor = textColor;
        return this;
    }

    public ProfileSettingDrawerItem withTextColorRes(int textColorRes) {
        this.textColorRes = textColorRes;
        return this;
    }

    @Override
    public ProfileSettingDrawerItem withSelectable(boolean selectable) {
        this.selectable = selectable;
        return this;
    }

    public int getSelectedColor() {
        return selectedColor;
    }

    public void setSelectedColor(int selectedColor) {
        this.selectedColor = selectedColor;
    }

    public int getSelectedColorRes() {
        return selectedColorRes;
    }

    public void setSelectedColorRes(int selectedColorRes) {
        this.selectedColorRes = selectedColorRes;
    }

    public int getTextColor() {
        return textColor;
    }

    public void setTextColor(int textColor) {
        this.textColor = textColor;
    }

    public int getTextColorRes() {
        return textColorRes;
    }

    public void setTextColorRes(int textColorRes) {
        this.textColorRes = textColorRes;
    }

    @Override
    public Object getTag() {
        return tag;
    }

    @Override
    public void setTag(Object tag) {
        this.tag = tag;
    }

    public Drawable getIcon() {
        return icon;
    }

    public void setIcon(Drawable icon) {
        this.icon = icon;
    }

    public IIcon getIIcon() {
        return iicon;
    }

    public void setIIcon(IIcon iicon) {
        this.iicon = iicon;
    }

    @Override
    public boolean isSelectable() {
        return selectable;
    }

    @Override
    public ProfileSettingDrawerItem setSelectable(boolean selectable) {
        this.selectable = selectable;
        return this;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public void setName(String name) {
        this.name = name;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    /** Alias for {@link #getEmail()}. */
    public String getDescription() {
        return email;
    }

    /** Alias for {@link #setEmail(String)}. */
    public void setDescription(String description) {
        // BUGFIX: previously read "this.email = email", a self-assignment
        // that silently discarded the parameter; store the description.
        this.email = description;
    }

    @Override
    public int getIdentifier() {
        return identifier;
    }

    public void setIdentifier(int identifier) {
        this.identifier = identifier;
    }

    @Override
    public boolean isEnabled() {
        return enabled;
    }

    @Override
    public String getType() {
        return "PROFILE_SETTING2_ITEM";
    }

    @Override
    public int getLayoutRes() {
        return R.layout.material_drawer_item_profile_setting;
    }

    /**
     * Binds this item to a list row, recycling {@code convertView} via the
     * ViewHolder pattern. Explicit colors win over resource ids, which win
     * over theme defaults; a Drawable icon wins over an IIcon.
     */
    @Override
    public View convertView(LayoutInflater inflater, View convertView, ViewGroup parent) {
        Context ctx = parent.getContext();
        ViewHolder viewHolder;
        if (convertView == null) {
            convertView = inflater.inflate(getLayoutRes(), parent, false);
            viewHolder = new ViewHolder(convertView);
            convertView.setTag(viewHolder);
        } else {
            viewHolder = (ViewHolder) convertView.getTag();
        }

        // Resolve the pressed/selected background color.
        int selected_color = selectedColor;
        if (selected_color == 0 && selectedColorRes != -1) {
            selected_color = ctx.getResources().getColor(selectedColorRes);
        } else if (selected_color == 0) {
            selected_color = UIUtils.getThemeColorFromAttrOrRes(ctx, R.attr.material_drawer_selected, R.color.material_drawer_selected);
        }
        UIUtils.setBackground(viewHolder.view, UIUtils.getDrawerItemBackground(ctx, selected_color));

        viewHolder.name.setText(this.getName());

        // Resolve the text color (also used to tint an IIcon below).
        int color = textColor;
        if (color == 0 && textColorRes != -1) {
            color = ctx.getResources().getColor(textColorRes);
        } else if (color == 0) {
            color = UIUtils.getThemeColorFromAttrOrRes(ctx, R.attr.material_drawer_primary_text, R.color.material_drawer_primary_text);
        }
        viewHolder.name.setTextColor(color);

        if (this.getIcon() != null) {
            viewHolder.icon.setImageDrawable(this.getIcon());
            viewHolder.icon.setVisibility(View.VISIBLE);
        } else if (this.getIIcon() != null) {
            viewHolder.icon.setImageDrawable(new IconicsDrawable(ctx, this.getIIcon()).color(color).actionBarSize().paddingDp(2));
            viewHolder.icon.setVisibility(View.VISIBLE);
        } else {
            viewHolder.icon.setVisibility(View.GONE);
        }
        return convertView;
    }

    /** Caches row sub-views to avoid repeated findViewById calls. */
    private static class ViewHolder {
        private View view;
        private ImageView icon;
        private TextView name;

        private ViewHolder(View view) {
            this.view = view;
            this.icon = (ImageView) view.findViewById(R.id.icon);
            this.name = (TextView) view.findViewById(R.id.name);
        }
    }
}
| |
package com.diandi.dragmenu;
import android.content.Context;
import android.graphics.Color;
import android.graphics.PorterDuff.Mode;
import android.support.v4.view.GestureDetectorCompat;
import android.support.v4.view.ViewCompat;
import android.support.v4.widget.ViewDragHelper;
import android.util.AttributeSet;
import android.view.GestureDetector.SimpleOnGestureListener;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import com.diandi.view.R;
import com.nineoldandroids.view.ViewHelper;
/**
 * A FrameLayout implementing a side-sliding drawer: a left panel is revealed
 * by horizontally dragging the main panel, driven by {@link ViewDragHelper}.
 * While dragging, both panels are scaled/faded and an optional shadow image
 * tracks the main panel. Expected child order in XML: left panel first, then
 * the main panel (a shadow ImageView is inserted between them at inflate time
 * when {@code isShowShadow} is true).
 */
public class DragLayout extends FrameLayout {
    private final static String TAG = "DragLayout";
    private boolean isShowShadow = true;
    private GestureDetectorCompat gestureDetector;
    private ViewDragHelper dragHelper;
    private DragListener dragListener;
    // Maximum horizontal travel of the main panel (60% of width, see onSizeChanged).
    private int range;
    private int width;
    private int height;
    // Current left edge of the main panel: 0 = closed, range = fully open.
    private int mainLeft;
    private Context context;
    private ImageView iv_shadow;
    private RelativeLayout vg_left;
    private DragMainLayout vg_main;

    private ViewDragHelper.Callback dragHelperCallback = new ViewDragHelper.Callback() {
        // Clamp the dragged position so mainLeft stays within [0, range].
        @Override
        public int clampViewPositionHorizontal(View child, int left, int dx) {
            if (mainLeft + dx < 0) {
                return 0;
            } else if (mainLeft + dx > range) {
                return range;
            } else {
                return left;
            }
        }

        // Both panels may be captured and dragged.
        @Override
        public boolean tryCaptureView(View child, int pointerId) {
            return true;
        }

        @Override
        public int getViewHorizontalDragRange(View child) {
            return width;
        }

        // On release: fling right opens, fling left closes; with no fling,
        // fall back to position thresholds (30% when dragging the main panel,
        // 70% when dragging the left panel).
        @Override
        public void onViewReleased(View releasedChild, float xvel, float yvel) {
            super.onViewReleased(releasedChild, xvel, yvel);
            if (xvel > 0) {
                open();
            } else if (xvel < 0) {
                close();
            } else if (releasedChild == vg_main && mainLeft > range * 0.3) {
                open();
            } else if (releasedChild == vg_left && mainLeft > range * 0.7) {
                open();
            } else {
                close();
            }
        }

        @Override
        public void onViewPositionChanged(View changedView, int left, int top,
                                          int dx, int dy) {
            if (changedView == vg_main) {
                mainLeft = left;
            } else {
                // NOTE(review): when the left panel is dragged, its new left
                // edge is added onto mainLeft as if it were a delta — confirm
                // this accumulation is intended rather than using dx.
                mainLeft = mainLeft + left;
            }
            // Re-clamp after accumulation.
            if (mainLeft < 0) {
                mainLeft = 0;
            } else if (mainLeft > range) {
                mainLeft = range;
            }
            if (isShowShadow) {
                iv_shadow.layout(mainLeft, 0, mainLeft + width, height);
            }
            // Keep both panels laid out consistently when the left panel moved.
            if (changedView == vg_left) {
                vg_left.layout(0, 0, width, height);
                vg_main.layout(mainLeft, 0, mainLeft + width, height);
            }
            dispatchDragEvent(mainLeft);
        }
    };
    private Status status = Status.Close;

    public DragLayout(Context context) {
        this(context, null);
    }

    public DragLayout(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
        this.context = context;
    }

    public DragLayout(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        gestureDetector = new GestureDetectorCompat(context, new YScrollDetector());
        dragHelper = ViewDragHelper.create(this, dragHelperCallback);
    }

    public void setDragListener(DragListener dragListener) {
        this.dragListener = dragListener;
    }

    // Wires up children after inflation: child 0 is the left panel; the main
    // panel is at index 2 when the shadow view was inserted at index 1,
    // otherwise at index 1.
    @Override
    protected void onFinishInflate() {
        super.onFinishInflate();
        if (isShowShadow) {
            iv_shadow = new ImageView(context);
            iv_shadow.setImageResource(R.drawable.shadow);
            LayoutParams lp = new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
            addView(iv_shadow, 1, lp);
        }
        vg_left = (RelativeLayout) getChildAt(0);
        vg_main = (DragMainLayout) getChildAt(isShowShadow ? 2 : 1);
        vg_main.setDragLayout(this);
        vg_left.setClickable(true);
        vg_main.setClickable(true);
    }

    public ViewGroup getVg_main() {
        return vg_main;
    }

    public ViewGroup getVg_left() {
        return vg_left;
    }

    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        super.onSizeChanged(w, h, oldw, oldh);
        width = vg_left.getMeasuredWidth();
        height = vg_left.getMeasuredHeight();
        // Drawer opens over 60% of the panel width.
        range = (int) (width * 0.6f);
    }

    @Override
    protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
        vg_left.layout(0, 0, width, height);
        vg_main.layout(mainLeft, 0, mainLeft + width, height);
    }

    // Intercept only when the drag helper wants the event AND the gesture is
    // predominantly horizontal (YScrollDetector), so vertical scrolling of
    // children is not hijacked.
    @Override
    public boolean onInterceptTouchEvent(MotionEvent ev) {
        return dragHelper.shouldInterceptTouchEvent(ev) && gestureDetector.onTouchEvent(ev);
    }

    @Override
    public boolean onTouchEvent(MotionEvent e) {
        try {
            dragHelper.processTouchEvent(e);
        } catch (Exception ex) {
            // ViewDragHelper can throw on inconsistent pointer state; swallow
            // to avoid crashing on multi-touch edge cases.
            ex.printStackTrace();
        }
        return false;
    }

    // Animates panels for the current position and notifies the listener of
    // drag progress and open/close transitions.
    private void dispatchDragEvent(int mainLeft) {
        if (dragListener == null) {
            return;
        }
        float percent = mainLeft / (float) range;
        animateView(percent);
        dragListener.onDrag(percent);
        Status lastStatus = status;
        if (lastStatus != getStatus() && status == Status.Close) {
            dragListener.onClose();
        } else if (lastStatus != getStatus() && status == Status.Open) {
            dragListener.onOpen();
        }
    }

    // percent in [0,1]: 0 = closed, 1 = fully open. Shrinks the main panel,
    // grows/fades in the left panel, and scales the shadow to match.
    private void animateView(float percent) {
        float f1 = 1 - percent * 0.3f;
        ViewHelper.setScaleX(vg_main, f1);
        ViewHelper.setScaleY(vg_main, f1);
        ViewHelper.setTranslationX(vg_left, -vg_left.getWidth() / 2.3f + vg_left.getWidth() / 2.3f * percent);
        ViewHelper.setScaleX(vg_left, 0.5f + 0.5f * percent);
        ViewHelper.setScaleY(vg_left, 0.5f + 0.5f * percent);
        ViewHelper.setAlpha(vg_left, percent);
        if (isShowShadow) {
            ViewHelper.setScaleX(iv_shadow, f1 * 1.4f * (1 - percent * 0.12f));
            ViewHelper.setScaleY(iv_shadow, f1 * 1.85f * (1 - percent * 0.12f));
        }
        // Dim the background from black toward transparent as the drawer opens.
        getBackground().setColorFilter(evaluate(percent, Color.BLACK, Color.TRANSPARENT), Mode.SRC_OVER);
    }

    // Linear per-channel ARGB interpolation (same approach as ArgbEvaluator).
    private Integer evaluate(float fraction, Object startValue, Integer endValue) {
        int startInt = (Integer) startValue;
        int startA = (startInt >> 24) & 0xff;
        int startR = (startInt >> 16) & 0xff;
        int startG = (startInt >> 8) & 0xff;
        int startB = startInt & 0xff;
        int endInt = (Integer) endValue;
        int endA = (endInt >> 24) & 0xff;
        int endR = (endInt >> 16) & 0xff;
        int endG = (endInt >> 8) & 0xff;
        int endB = endInt & 0xff;
        return ((startA + (int) (fraction * (endA - startA))) << 24)
                | ((startR + (int) (fraction * (endR - startR))) << 16)
                | ((startG + (int) (fraction * (endG - startG))) << 8)
                | ((startB + (int) (fraction * (endB - startB))));
    }

    // Continues any settle animation started by smoothSlideViewTo.
    @Override
    public void computeScroll() {
        if (dragHelper.continueSettling(true)) {
            ViewCompat.postInvalidateOnAnimation(this);
        }
    }

    // Derives (and caches) the status from the current main panel position.
    public Status getStatus() {
        if (mainLeft == 0) {
            status = Status.Close;
        } else if (mainLeft == range) {
            status = Status.Open;
        } else {
            status = Status.Drag;
        }
        return status;
    }

    public void open() {
        open(true);
    }

    public void open(boolean animate) {
        if (animate) {
            if (dragHelper.smoothSlideViewTo(vg_main, range, 0)) {
                ViewCompat.postInvalidateOnAnimation(this);
            }
        } else {
            vg_main.layout(range, 0, range * 2, height);
            dispatchDragEvent(range);
        }
    }

    public void close() {
        close(true);
    }

    public void close(boolean animate) {
        if (animate) {
            if (dragHelper.smoothSlideViewTo(vg_main, 0, 0)) {
                ViewCompat.postInvalidateOnAnimation(this);
            }
        } else {
            vg_main.layout(0, 0, width, height);
            dispatchDragEvent(0);
        }
    }

    public enum Status {
        Drag, Open, Close
    }

    /** Callback for drawer progress and open/close transitions. */
    public interface DragListener {
        public void onOpen();
        public void onClose();
        public void onDrag(float percent);
    }

    // Accepts a scroll for interception only when it is mostly horizontal.
    class YScrollDetector extends SimpleOnGestureListener {
        @Override
        public boolean onScroll(MotionEvent e1, MotionEvent e2, float dx, float dy) {
            return Math.abs(dy) <= Math.abs(dx);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datos.vfs.provider;
import com.datos.vfs.util.CryptorFactory;
import com.datos.vfs.FileName;
import com.datos.vfs.FileSystemException;
import com.datos.vfs.FileType;
import com.datos.vfs.util.Cryptor;
/**
* Implementation for any url based filesystem.
* <p>
* Parses the url into user/password/host/port/path.
* Does not handle a query string (after ?)
*
* @see URLFileNameParser URLFileNameParser for the implementation which also handles the query string too
*/
public class HostFileNameParser extends AbstractFileNameParser
{
    // Port used when the URI does not specify one explicitly.
    private final int defaultPort;

    /**
     * Creates a parser whose scheme falls back to the given port.
     *
     * @param defaultPort the port to report when the URI omits one.
     */
    public HostFileNameParser(final int defaultPort)
    {
        this.defaultPort = defaultPort;
    }

    /**
     * Gets the fallback port for this scheme.
     *
     * @return the default port.
     */
    public int getDefaultPort()
    {
        return defaultPort;
    }

    /**
     * Parses an absolute URI of the form
     * {@code scheme://[user[:password]@]host[:port][/path]} into a
     * {@link GenericFileName}. Query strings are NOT handled here.
     *
     * @param context the component context (unused by this implementation).
     * @param base the base file name (unused by this implementation).
     * @param filename the absolute URI to parse.
     * @return the parsed file name.
     * @throws FileSystemException if the URI is malformed.
     */
    @Override
    public FileName parseUri(final VfsComponentContext context, final FileName base, final String filename)
        throws FileSystemException
    {
        // FTP URI are generic URI (as per RFC 2396)
        final StringBuilder name = new StringBuilder();
        // Extract the scheme and authority parts
        final Authority auth = extractToPath(filename, name);
        // Decode and normalise the file name
        UriParser.canonicalizePath(name, 0, name.length(), this);
        UriParser.fixSeparators(name);
        final FileType fileType = UriParser.normalisePath(name);
        final String path = name.toString();
        return new GenericFileName(
            auth.scheme,
            auth.hostName,
            auth.port,
            defaultPort,
            auth.userName,
            auth.password,
            path,
            fileType);
    }

    /**
     * Extracts the scheme, userinfo, hostname and port components of a
     * generic URI.
     *
     * @param uri The absolute URI to parse.
     * @param name Used to return the remainder of the URI.
     * @return Authority extracted host authority, never null.
     * @throws FileSystemException if authority cannot be extracted.
     */
    protected Authority extractToPath(final String uri,
                                      final StringBuilder name)
        throws FileSystemException
    {
        final Authority auth = new Authority();
        // Extract the scheme
        auth.scheme = UriParser.extractScheme(uri, name);
        // Expecting "//"
        if (name.length() < 2 || name.charAt(0) != '/' || name.charAt(1) != '/')
        {
            throw new FileSystemException("vfs.provider/missing-double-slashes.error", uri);
        }
        name.delete(0, 2);
        // Extract userinfo, and split into username and password
        final String userInfo = extractUserInfo(name);
        final String userName;
        final String password;
        if (userInfo != null)
        {
            // Split on the FIRST ':' only; the password may itself contain ':'.
            final int idx = userInfo.indexOf(':');
            if (idx == -1)
            {
                userName = userInfo;
                password = null;
            }
            else
            {
                userName = userInfo.substring(0, idx);
                password = userInfo.substring(idx + 1);
            }
        }
        else
        {
            userName = null;
            password = null;
        }
        auth.userName = UriParser.decode(userName);
        auth.password = UriParser.decode(password);
        // A password of the form "{...}" is treated as encrypted and is
        // decrypted via the configured Cryptor.
        if (auth.password != null && auth.password.startsWith("{") && auth.password.endsWith("}"))
        {
            try
            {
                final Cryptor cryptor = CryptorFactory.getCryptor();
                auth.password = cryptor.decrypt(auth.password.substring(1, auth.password.length() - 1));
            }
            catch (final Exception ex)
            {
                throw new FileSystemException("Unable to decrypt password", ex);
            }
        }
        // Extract hostname, and normalise (lowercase)
        final String hostName = extractHostName(name);
        if (hostName == null)
        {
            throw new FileSystemException("vfs.provider/missing-hostname.error", uri);
        }
        // NOTE(review): default-locale toLowerCase(); in e.g. the Turkish
        // locale 'I' does not map to 'i' — consider toLowerCase(Locale.ROOT).
        auth.hostName = hostName.toLowerCase();
        // Extract port
        auth.port = extractPort(name, uri);
        // Expecting '/' or empty name
        if (name.length() > 0 && name.charAt(0) != '/')
        {
            throw new FileSystemException("vfs.provider/missing-hostname-path-sep.error", uri);
        }
        return auth;
    }

    /**
     * Extracts the user info from a URI.
     *
     * @param name string buffer with the "scheme://" part has been removed already. Will be modified.
     * @return the user information up to the '@' or null.
     */
    protected String extractUserInfo(final StringBuilder name)
    {
        final int maxlen = name.length();
        for (int pos = 0; pos < maxlen; pos++)
        {
            final char ch = name.charAt(pos);
            if (ch == '@')
            {
                // Found the end of the user info
                final String userInfo = name.substring(0, pos);
                name.delete(0, pos + 1);
                return userInfo;
            }
            if (ch == '/' || ch == '?')
            {
                // Not allowed in user info
                break;
            }
        }
        // Not found
        return null;
    }

    /**
     * Extracts the hostname from a URI.
     *
     * @param name string buffer with the "scheme://[userinfo@]" part has been removed already. Will be modified.
     * @return the host name or null.
     */
    protected String extractHostName(final StringBuilder name)
    {
        final int maxlen = name.length();
        int pos = 0;
        // The host name runs up to the first URI delimiter character.
        for (; pos < maxlen; pos++)
        {
            final char ch = name.charAt(pos);
            if (ch == '/' || ch == ';' || ch == '?' || ch == ':'
                || ch == '@' || ch == '&' || ch == '=' || ch == '+'
                || ch == '$' || ch == ',')
            {
                break;
            }
        }
        if (pos == 0)
        {
            return null;
        }
        final String hostname = name.substring(0, pos);
        name.delete(0, pos);
        return hostname;
    }

    /**
     * Extracts the port from a URI.
     * @param name string buffer with the "scheme://[userinfo@]hostname" part has been removed already.
     * Will be modified.
     * @param uri full URI for error reporting.
     * @return The port, or -1 if the URI does not contain a port.
     * @throws FileSystemException if URI is malformed.
     * @throws NumberFormatException if port number cannot be parsed.
     */
    protected int extractPort(final StringBuilder name, final String uri) throws FileSystemException
    {
        if (name.length() < 1 || name.charAt(0) != ':')
        {
            return -1;
        }
        final int maxlen = name.length();
        int pos = 1;
        // Consume the run of digits following ':'.
        for (; pos < maxlen; pos++)
        {
            final char ch = name.charAt(pos);
            if (ch < '0' || ch > '9')
            {
                break;
            }
        }
        final String port = name.substring(1, pos);
        name.delete(0, pos);
        if (port.length() == 0)
        {
            throw new FileSystemException("vfs.provider/missing-port.error", uri);
        }
        // NOTE(review): a digit run exceeding Integer range still throws
        // NumberFormatException here, as documented in the javadoc above.
        return Integer.parseInt(port);
    }

    /**
     * Parsed authority info (scheme, hostname, username/password, port).
     */
    protected static class Authority
    {
        private String scheme;
        private String hostName;
        private String userName;
        private String password;
        private int port;

        /**
         * Get the connection schema.
         * @return the connection scheme.
         * @since 2.0
         */
        public String getScheme()
        {
            return scheme;
        }

        /**
         * Set the connection schema.
         * @param scheme the connection scheme.
         * @since 2.0
         */
        public void setScheme(final String scheme)
        {
            this.scheme = scheme;
        }

        /**
         * Get the host name.
         * @return the host name.
         * @since 2.0
         */
        public String getHostName()
        {
            return hostName;
        }

        /**
         * Set the host name.
         * @param hostName the host name.
         * @since 2.0
         */
        public void setHostName(final String hostName)
        {
            this.hostName = hostName;
        }

        /**
         * Get the user name.
         * @return the user name or null.
         * @since 2.0
         */
        public String getUserName()
        {
            return userName;
        }

        /**
         * Set the user name.
         * @param userName the user name.
         * @since 2.0
         */
        public void setUserName(final String userName)
        {
            this.userName = userName;
        }

        /**
         * Get the user password.
         * @return the password or null.
         * @since 2.0
         */
        public String getPassword()
        {
            return password;
        }

        /**
         * Set the user password.
         * @param password the user password.
         * @since 2.0
         */
        public void setPassword(final String password)
        {
            this.password = password;
        }

        /**
         * Get the port.
         * @return the port or -1.
         * @since 2.0
         */
        public int getPort()
        {
            return port;
        }

        /**
         * Set the connection port.
         * @param port the port number or -1.
         * @since 2.0
         */
        public void setPort(final int port)
        {
            this.port = port;
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.ops;
import java.util.Iterator;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.proto.UserBitShared.MetricValue;
import org.apache.drill.exec.proto.UserBitShared.OperatorProfile;
import org.apache.drill.exec.proto.UserBitShared.OperatorProfile.Builder;
import org.apache.drill.exec.proto.UserBitShared.StreamProfile;
import com.carrotsearch.hppc.IntDoubleHashMap;
import com.carrotsearch.hppc.IntLongHashMap;
import com.carrotsearch.hppc.cursors.IntDoubleCursor;
import com.carrotsearch.hppc.cursors.IntLongCursor;
import com.carrotsearch.hppc.procedures.IntDoubleProcedure;
import com.carrotsearch.hppc.procedures.IntLongProcedure;
/**
 * Collects runtime statistics for a single operator: setup / processing /
 * wait timings (tracked as a simple three-phase state machine), per-input
 * record/batch/schema counts, and arbitrary long/double metrics keyed by
 * metric id. Produces an {@link OperatorProfile} snapshot via
 * {@link #getProfile()}.
 * <p>
 * Not thread-safe; use the copy constructor to maintain per-thread stats and
 * {@link #mergeMetrics(OperatorStats)} to combine them.
 */
public class OperatorStats {
  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(OperatorStats.class);

  protected final int operatorId;
  protected final int operatorType;
  // May be null; when present, peak allocation is reported in the profile.
  private final BufferAllocator allocator;

  private IntLongHashMap longMetrics = new IntLongHashMap();
  private IntDoubleHashMap doubleMetrics = new IntDoubleHashMap();

  // Per-input-stream counters, indexed by incoming stream ordinal.
  public long[] recordsReceivedByInput;
  public long[] batchesReceivedByInput;
  private long[] schemaCountByInput;

  // Phase flags and accumulated nanos; *Mark holds the nanoTime when the
  // corresponding phase was entered.
  private boolean inProcessing = false;
  private boolean inSetup = false;
  private boolean inWait = false;

  protected long processingNanos;
  protected long setupNanos;
  protected long waitNanos;

  private long processingMark;
  private long setupMark;
  private long waitMark;

  private int inputCount;

  public OperatorStats(OpProfileDef def, BufferAllocator allocator){
    this(def.getOperatorId(), def.getOperatorType(), def.getIncomingCount(), allocator);
  }

  /**
   * Copy constructor to be able to create a copy of existing stats object shell and use it independently
   * this is useful if stats have to be updated in different threads, since it is not really
   * possible to update such stats as waitNanos, setupNanos and processingNanos across threads
   * @param original - OperatorStats object to create a copy from
   * @param isClean - flag to indicate whether to start with clean state indicators or inherit those from original object
   */
  public OperatorStats(OperatorStats original, boolean isClean) {
    this(original.operatorId, original.operatorType, original.inputCount, original.allocator);

    if ( !isClean ) {
      inProcessing = original.inProcessing;
      inSetup = original.inSetup;
      inWait = original.inWait;

      processingMark = original.processingMark;
      setupMark = original.setupMark;
      waitMark = original.waitMark;
    }
  }

  private OperatorStats(int operatorId, int operatorType, int inputCount, BufferAllocator allocator) {
    super();
    this.allocator = allocator;
    this.operatorId = operatorId;
    this.operatorType = operatorType;
    this.inputCount = inputCount;
    this.recordsReceivedByInput = new long[inputCount];
    this.batchesReceivedByInput = new long[inputCount];
    this.schemaCountByInput = new long[inputCount];
  }

  // Builds the message used by the phase-transition assertions below.
  private String assertionError(String msg){
    return String.format("Failure while %s for operator id %d. Currently have states of processing:%s, setup:%s, waiting:%s.", msg, operatorId, inProcessing, inSetup, inWait);
  }

  /**
   * OperatorStats merger - to merge stats from other OperatorStats
   * this is needed in case some processing is multithreaded that needs to have
   * separate OperatorStats to deal with
   * WARN - this will only work for metrics that can be added
   * @param from - OperatorStats from where to merge to "this"
   * @return OperatorStats - for convenience so one can merge multiple stats in one go
   */
  public OperatorStats mergeMetrics(OperatorStats from) {
    // HPPC maps iterate via reusable cursors; putOrAdd inserts the value or
    // adds it to an existing entry.
    for (final IntLongCursor cursor : from.longMetrics) {
      longMetrics.putOrAdd(cursor.key, cursor.value, cursor.value);
    }
    for (final IntDoubleCursor cursor : from.doubleMetrics) {
      doubleMetrics.putOrAdd(cursor.key, cursor.value, cursor.value);
    }
    return this;
  }

  /**
   * Clear stats
   */
  public void clear() {
    processingNanos = 0L;
    setupNanos = 0L;
    waitNanos = 0L;
    longMetrics.clear();
    doubleMetrics.clear();
  }

  /** Enters the setup phase (suspends processing time accounting). */
  public void startSetup() {
    assert !inSetup  : assertionError("starting setup");
    stopProcessing();
    inSetup = true;
    setupMark = System.nanoTime();
  }

  /** Leaves the setup phase and resumes processing time accounting. */
  public void stopSetup() {
    assert inSetup :  assertionError("stopping setup");
    startProcessing();
    setupNanos += System.nanoTime() - setupMark;
    inSetup = false;
  }

  /** Begins accounting processing time. */
  public void startProcessing() {
    assert !inProcessing : assertionError("starting processing");
    processingMark = System.nanoTime();
    inProcessing = true;
  }

  /** Stops accounting processing time and accumulates the elapsed nanos. */
  public void stopProcessing() {
    assert inProcessing : assertionError("stopping processing");
    processingNanos += System.nanoTime() - processingMark;
    inProcessing = false;
  }

  /** Enters the wait phase (suspends processing time accounting). */
  public void startWait() {
    assert !inWait : assertionError("starting waiting");
    stopProcessing();
    inWait = true;
    waitMark = System.nanoTime();
  }

  /** Leaves the wait phase and resumes processing time accounting. */
  public void stopWait() {
    assert inWait : assertionError("stopping waiting");
    startProcessing();
    waitNanos += System.nanoTime() - waitMark;
    inWait = false;
  }

  /**
   * Records an incoming batch on the given input stream.
   *
   * @param inputIndex ordinal of the incoming stream
   * @param records number of records in the batch
   * @param newSchema whether the batch carried a new schema
   */
  public void batchReceived(int inputIndex, long records, boolean newSchema) {
    recordsReceivedByInput[inputIndex] += records;
    batchesReceivedByInput[inputIndex]++;
    if(newSchema){
      schemaCountByInput[inputIndex]++;
    }
  }

  /** Snapshots all counters and metrics into an OperatorProfile. */
  public OperatorProfile getProfile() {
    final OperatorProfile.Builder b = OperatorProfile //
        .newBuilder() //
        .setOperatorType(operatorType) //
        .setOperatorId(operatorId) //
        .setSetupNanos(setupNanos) //
        .setProcessNanos(processingNanos)
        .setWaitNanos(waitNanos);

    if(allocator != null){
      b.setPeakLocalMemoryAllocated(allocator.getPeakMemoryAllocation());
    }

    addAllMetrics(b);

    return b.build();
  }

  public void addAllMetrics(OperatorProfile.Builder builder) {
    addStreamProfile(builder);
    addLongMetrics(builder);
    addDoubleMetrics(builder);
  }

  /** Adds one StreamProfile per input with its batch/record/schema counts. */
  public void addStreamProfile(OperatorProfile.Builder builder) {
    for(int i = 0; i < recordsReceivedByInput.length; i++){
      builder.addInputProfile(StreamProfile.newBuilder().setBatches(batchesReceivedByInput[i]).setRecords(recordsReceivedByInput[i]).setSchemas(this.schemaCountByInput[i]));
    }
  }

  // Procedure used with HPPC forEach to copy long metrics into the builder.
  private class LongProc implements IntLongProcedure {

    private final OperatorProfile.Builder builder;

    public LongProc(Builder builder) {
      super();
      this.builder = builder;
    }

    @Override
    public void apply(int key, long value) {
      builder.addMetric(MetricValue.newBuilder().setMetricId(key).setLongValue(value));
    }
  }

  public void addLongMetrics(OperatorProfile.Builder builder) {
    if (longMetrics.size() > 0) {
      longMetrics.forEach(new LongProc(builder));
    }
  }

  // Procedure used with HPPC forEach to copy double metrics into the builder.
  private class DoubleProc implements IntDoubleProcedure {
    private final OperatorProfile.Builder builder;

    public DoubleProc(Builder builder) {
      super();
      this.builder = builder;
    }

    @Override
    public void apply(int key, double value) {
      builder.addMetric(MetricValue.newBuilder().setMetricId(key).setDoubleValue(value));
    }
  }

  public void addDoubleMetrics(OperatorProfile.Builder builder) {
    if (doubleMetrics.size() > 0) {
      doubleMetrics.forEach(new DoubleProc(builder));
    }
  }

  /** Adds {@code value} onto the metric's current total (insert if absent). */
  public void addLongStat(MetricDef metric, long value){
    longMetrics.putOrAdd(metric.metricId(), value, value);
  }

  /** Adds {@code value} onto the metric's current total (insert if absent). */
  public void addDoubleStat(MetricDef metric, double value){
    doubleMetrics.putOrAdd(metric.metricId(), value, value);
  }

  /** Overwrites the metric with {@code value}. */
  public void setLongStat(MetricDef metric, long value){
    longMetrics.put(metric.metricId(), value);
  }

  /** Overwrites the metric with {@code value}. */
  public void setDoubleStat(MetricDef metric, double value){
    doubleMetrics.put(metric.metricId(), value);
  }

  public long getWaitNanos() {
    return waitNanos;
  }

  /**
   * Adjust waitNanos based on client calculations
   * @param waitNanosOffset - could be negative as well as positive
   */
  public void adjustWaitNanos(long waitNanosOffset) {
    this.waitNanos += waitNanosOffset;
  }

  public long getProcessingNanos() {
    return processingNanos;
  }
}
| |
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.protobuf;
import java.io.IOException;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.util.AbstractList;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* The classes contained within are used internally by the Protocol Buffer
* library and generated message implementations. They are public only because
* those generated messages do not reside in the {@code protobuf} package.
* Others should not use this class directly.
*
* @author kenton@google.com (Kenton Varda)
*/
public class Internal {

  protected static final Charset UTF_8 = Charset.forName("UTF-8");
  protected static final Charset ISO_8859_1 = Charset.forName("ISO-8859-1");

  /**
   * Helper called by generated code to construct default values for string
   * fields.
   * <p>
   * The protocol compiler does not actually contain a UTF-8 decoder -- it
   * just pushes UTF-8-encoded text around without touching it.  The one place
   * where this presents a problem is when generating Java string literals.
   * Unicode characters in the string literal would normally need to be encoded
   * using a Unicode escape sequence, which would require decoding them.
   * To get around this, protoc instead embeds the UTF-8 bytes into the
   * generated code and leaves it to the runtime library to decode them.
   * <p>
   * It gets worse, though.  If protoc just generated a byte array, like:
   *   new byte[] {0x12, 0x34, 0x56, 0x78}
   * Java actually generates *code* which allocates an array and then fills
   * in each value.  This is much less efficient than just embedding the bytes
   * directly into the bytecode.  To get around this, we need another
   * work-around.  String literals are embedded directly, so protoc actually
   * generates a string literal corresponding to the bytes.  The easiest way
   * to do this is to use the ISO-8859-1 character set, which corresponds to
   * the first 256 characters of the Unicode range.  Protoc can then use
   * good old CEscape to generate the string.
   * <p>
   * So we have a string literal which represents a set of bytes which
   * represents another string.  This function -- stringDefaultValue --
   * converts from the generated string to the string we actually want.  The
   * generated code calls this automatically.
   */
  public static String stringDefaultValue(String bytes) {
    return new String(bytes.getBytes(ISO_8859_1), UTF_8);
  }

  /**
   * Helper called by generated code to construct default values for bytes
   * fields.
   * <p>
   * This is a lot like {@link #stringDefaultValue}, but for bytes fields.
   * In this case we only need the second of the two hacks -- allowing us to
   * embed raw bytes as a string literal with ISO-8859-1 encoding.
   */
  public static ByteString bytesDefaultValue(String bytes) {
    return ByteString.copyFrom(bytes.getBytes(ISO_8859_1));
  }

  /**
   * Helper called by generated code to construct default values for bytes
   * fields.
   * <p>
   * This is like {@link #bytesDefaultValue}, but returns a byte array.
   */
  public static byte[] byteArrayDefaultValue(String bytes) {
    return bytes.getBytes(ISO_8859_1);
  }

  /**
   * Helper called by generated code to construct default values for bytes
   * fields.
   * <p>
   * This is like {@link #bytesDefaultValue}, but returns a ByteBuffer.
   */
  public static ByteBuffer byteBufferDefaultValue(String bytes) {
    return ByteBuffer.wrap(byteArrayDefaultValue(bytes));
  }

  /**
   * Create a new ByteBuffer and copy all the content of {@code source}
   * ByteBuffer to the new ByteBuffer. The new ByteBuffer's limit and
   * capacity will be source.capacity(), and its position will be 0.
   * Note that the state of {@code source} ByteBuffer won't be changed.
   */
  public static ByteBuffer copyByteBuffer(ByteBuffer source) {
    // Make a duplicate of the source ByteBuffer and read data from the
    // duplicate. This is to avoid affecting the source ByteBuffer's state.
    ByteBuffer temp = source.duplicate();
    // We want to copy all the data in the source ByteBuffer, not just the
    // remaining bytes.
    temp.clear();
    ByteBuffer result = ByteBuffer.allocate(temp.capacity());
    result.put(temp);
    result.clear();
    return result;
  }

  /**
   * Helper called by generated code to determine if a byte array is a valid
   * UTF-8 encoded string such that the original bytes can be converted to
   * a String object and then back to a byte array round tripping the bytes
   * without loss.  More precisely, returns {@code true} whenever:
   * <pre>   {@code
   * Arrays.equals(byteString.toByteArray(),
   *     new String(byteString.toByteArray(), "UTF-8").getBytes("UTF-8"))
   * }</pre>
   *
   * <p>This method rejects "overlong" byte sequences, as well as
   * 3-byte sequences that would map to a surrogate character, in
   * accordance with the restricted definition of UTF-8 introduced in
   * Unicode 3.1.  Note that the UTF-8 decoder included in Oracle's
   * JDK has been modified to also reject "overlong" byte sequences,
   * but currently (2011) still accepts 3-byte surrogate character
   * byte sequences.
   *
   * <p>See the Unicode Standard,<br>
   * Table 3-6. <em>UTF-8 Bit Distribution</em>,<br>
   * Table 3-7. <em>Well Formed UTF-8 Byte Sequences</em>.
   *
   * <p>As of 2011-02, this method simply returns the result of {@link
   * ByteString#isValidUtf8()}.  Calling that method directly is preferred.
   *
   * @param byteString the string to check
   * @return whether the byte array is round trippable
   */
  public static boolean isValidUtf8(ByteString byteString) {
    return byteString.isValidUtf8();
  }

  /**
   * Like {@link #isValidUtf8(ByteString)} but for byte arrays.
   */
  public static boolean isValidUtf8(byte[] byteArray) {
    return Utf8.isValidUtf8(byteArray);
  }

  /**
   * Helper method to get the UTF-8 bytes of a string.
   */
  public static byte[] toByteArray(String value) {
    return value.getBytes(UTF_8);
  }

  /**
   * Helper method to convert a byte array to a string using UTF-8 encoding.
   */
  public static String toStringUtf8(byte[] bytes) {
    return new String(bytes, UTF_8);
  }

  /**
   * Interface for an enum value or value descriptor, to be used in FieldSet.
   * The lite library stores enum values directly in FieldSets but the full
   * library stores EnumValueDescriptors in order to better support reflection.
   */
  public interface EnumLite {
    int getNumber();
  }

  /**
   * Interface for an object which maps integers to {@link EnumLite}s.
   * {@link Descriptors.EnumDescriptor} implements this interface by mapping
   * numbers to {@link Descriptors.EnumValueDescriptor}s.  Additionally,
   * every generated enum type has a static method internalGetValueMap() which
   * returns an implementation of this type that maps numbers to enum values.
   */
  public interface EnumLiteMap<T extends EnumLite> {
    T findValueByNumber(int number);
  }

  /**
   * Helper method for implementing {@link Message#hashCode()} for longs.
   * @see Long#hashCode()
   */
  public static int hashLong(long n) {
    return (int) (n ^ (n >>> 32));
  }

  /**
   * Helper method for implementing {@link Message#hashCode()} for
   * booleans.
   * @see Boolean#hashCode()
   */
  public static int hashBoolean(boolean b) {
    return b ? 1231 : 1237;
  }

  /**
   * Helper method for implementing {@link Message#hashCode()} for enums.
   * <p>
   * This is needed because {@link java.lang.Enum#hashCode()} is final, but we
   * need to use the field number as the hash code to ensure compatibility
   * between statically and dynamically generated enum objects.
   */
  public static int hashEnum(EnumLite e) {
    return e.getNumber();
  }

  /**
   * Helper method for implementing {@link Message#hashCode()} for
   * enum lists.
   */
  public static int hashEnumList(List<? extends EnumLite> list) {
    int hash = 1;
    for (EnumLite e : list) {
      hash = 31 * hash + hashEnum(e);
    }
    return hash;
  }

  /**
   * Helper method for implementing {@link Message#equals(Object)} for bytes field.
   */
  public static boolean equals(List<byte[]> a, List<byte[]> b) {
    if (a.size() != b.size()) return false;
    for (int i = 0; i < a.size(); ++i) {
      if (!Arrays.equals(a.get(i), b.get(i))) {
        return false;
      }
    }
    return true;
  }

  /**
   * Helper method for implementing {@link Message#hashCode()} for bytes field.
   */
  public static int hashCode(List<byte[]> list) {
    int hash = 1;
    for (byte[] bytes : list) {
      hash = 31 * hash + hashCode(bytes);
    }
    return hash;
  }

  /**
   * Helper method for implementing {@link Message#hashCode()} for bytes field.
   */
  public static int hashCode(byte[] bytes) {
    // The hash code for a byte array should be the same as the hash code for a
    // ByteString with the same content. This is to ensure that the generated
    // hashCode() method will return the same value as the pure reflection
    // based hashCode() method.
    return LiteralByteString.hashCode(bytes);
  }

  /**
   * Helper method for implementing {@link Message#equals(Object)} for bytes
   * field.
   */
  public static boolean equalsByteBuffer(ByteBuffer a, ByteBuffer b) {
    if (a.capacity() != b.capacity()) {
      return false;
    }
    // ByteBuffer.equals() will only compare the remaining bytes, but we want to
    // compare all the content.
    return a.duplicate().clear().equals(b.duplicate().clear());
  }

  /**
   * Helper method for implementing {@link Message#equals(Object)} for bytes
   * field.
   */
  public static boolean equalsByteBuffer(
      List<ByteBuffer> a, List<ByteBuffer> b) {
    if (a.size() != b.size()) {
      return false;
    }
    for (int i = 0; i < a.size(); ++i) {
      if (!equalsByteBuffer(a.get(i), b.get(i))) {
        return false;
      }
    }
    return true;
  }

  /**
   * Helper method for implementing {@link Message#hashCode()} for bytes
   * field.
   */
  public static int hashCodeByteBuffer(List<ByteBuffer> list) {
    int hash = 1;
    for (ByteBuffer bytes : list) {
      hash = 31 * hash + hashCodeByteBuffer(bytes);
    }
    return hash;
  }

  private static final int DEFAULT_BUFFER_SIZE = 4096;

  /**
   * Helper method for implementing {@link Message#hashCode()} for bytes
   * field.
   */
  public static int hashCodeByteBuffer(ByteBuffer bytes) {
    if (bytes.hasArray()) {
      // Fast path.
      int h = LiteralByteString.hashCode(bytes.capacity(), bytes.array(),
          bytes.arrayOffset(), bytes.capacity());
      return h == 0 ? 1 : h;
    } else {
      // Read the data into a temporary byte array before calculating the
      // hash value.
      final int bufferSize = bytes.capacity() > DEFAULT_BUFFER_SIZE
          ? DEFAULT_BUFFER_SIZE : bytes.capacity();
      final byte[] buffer = new byte[bufferSize];
      final ByteBuffer duplicated = bytes.duplicate();
      duplicated.clear();
      int h = bytes.capacity();
      while (duplicated.remaining() > 0) {
        final int length = duplicated.remaining() <= bufferSize ?
            duplicated.remaining() : bufferSize;
        duplicated.get(buffer, 0, length);
        h = LiteralByteString.hashCode(h, buffer, 0, length);
      }
      return h == 0 ? 1 : h;
    }
  }

  /**
   * Returns the default instance of the given generated message class by
   * reflectively invoking its static {@code getDefaultInstance()} method.
   *
   * @throws RuntimeException wrapping the underlying reflective failure if the
   *     method is missing or cannot be invoked
   */
  @SuppressWarnings("unchecked")
  public static <T extends MessageLite> T getDefaultInstance(Class<T> clazz) {
    try {
      Method method = clazz.getMethod("getDefaultInstance");
      // getDefaultInstance() is a static method, so no receiver is required.
      // The previous code passed the Method object itself as the receiver;
      // that was silently ignored for static methods but was misleading.
      return (T) method.invoke(null);
    } catch (Exception e) {
      throw new RuntimeException(
          "Failed to get default instance for " + clazz, e);
    }
  }

  /**
   * An empty byte array constant used in generated code.
   */
  public static final byte[] EMPTY_BYTE_ARRAY = new byte[0];

  /**
   * An empty byte array constant used in generated code.
   */
  public static final ByteBuffer EMPTY_BYTE_BUFFER =
      ByteBuffer.wrap(EMPTY_BYTE_ARRAY);

  /** An empty coded input stream constant used in generated code. */
  public static final CodedInputStream EMPTY_CODED_INPUT_STREAM =
      CodedInputStream.newInstance(EMPTY_BYTE_ARRAY);

  /**
   * Provides an immutable view of List&lt;T&gt; around a List&lt;F&gt;.
   *
   * Protobuf internal. Used in protobuf generated code only.
   */
  public static class ListAdapter<F, T> extends AbstractList<T> {
    /**
     * Convert individual elements of the List from F to T.
     */
    public interface Converter<F, T> {
      T convert(F from);
    }

    private final List<F> fromList;
    private final Converter<F, T> converter;

    public ListAdapter(List<F> fromList, Converter<F, T> converter) {
      this.fromList = fromList;
      this.converter = converter;
    }

    @Override
    public T get(int index) {
      return converter.convert(fromList.get(index));
    }

    @Override
    public int size() {
      return fromList.size();
    }
  }

  /**
   * Wrap around a Map&lt;K, RealValue&gt; and provide a Map&lt;K, V&gt; interface.
   */
  public static class MapAdapter<K, V, RealValue> extends AbstractMap<K, V> {
    /**
     * An interface used to convert between two types.
     */
    public interface Converter<A, B> {
      B doForward(A object);
      A doBackward(B object);
    }

    /**
     * Builds a Converter between enum numbers and enum values; unknown
     * numbers map to {@code unrecognizedValue}.
     */
    public static <T extends EnumLite> Converter<Integer, T> newEnumConverter(
        final EnumLiteMap<T> enumMap, final T unrecognizedValue) {
      return new Converter<Integer, T>() {
        @Override
        public T doForward(Integer value) {
          T result = enumMap.findValueByNumber(value);
          return result == null ? unrecognizedValue : result;
        }
        @Override
        public Integer doBackward(T value) {
          return value.getNumber();
        }
      };
    }

    private final Map<K, RealValue> realMap;
    private final Converter<RealValue, V> valueConverter;

    public MapAdapter(Map<K, RealValue> realMap,
        Converter<RealValue, V> valueConverter) {
      this.realMap = realMap;
      this.valueConverter = valueConverter;
    }

    @SuppressWarnings("unchecked")
    @Override
    public V get(Object key) {
      RealValue result = realMap.get(key);
      if (result == null) {
        return null;
      }
      return valueConverter.doForward(result);
    }

    @Override
    public V put(K key, V value) {
      RealValue oldValue = realMap.put(key, valueConverter.doBackward(value));
      if (oldValue == null) {
        return null;
      }
      return valueConverter.doForward(oldValue);
    }

    @Override
    public Set<java.util.Map.Entry<K, V>> entrySet() {
      return new SetAdapter(realMap.entrySet());
    }

    private class SetAdapter extends AbstractSet<Map.Entry<K, V>> {
      private final Set<Map.Entry<K, RealValue>> realSet;
      public SetAdapter(Set<Map.Entry<K, RealValue>> realSet) {
        this.realSet = realSet;
      }

      @Override
      public Iterator<java.util.Map.Entry<K, V>> iterator() {
        return new IteratorAdapter(realSet.iterator());
      }

      @Override
      public int size() {
        return realSet.size();
      }
    }

    private class IteratorAdapter implements Iterator<Map.Entry<K, V>> {
      private final Iterator<Map.Entry<K, RealValue>> realIterator;

      public IteratorAdapter(
          Iterator<Map.Entry<K, RealValue>> realIterator) {
        this.realIterator = realIterator;
      }

      @Override
      public boolean hasNext() {
        return realIterator.hasNext();
      }

      @Override
      public java.util.Map.Entry<K, V> next() {
        return new EntryAdapter(realIterator.next());
      }

      @Override
      public void remove() {
        realIterator.remove();
      }
    }

    private class EntryAdapter implements Map.Entry<K, V> {
      private final Map.Entry<K, RealValue> realEntry;

      public EntryAdapter(Map.Entry<K, RealValue> realEntry) {
        this.realEntry = realEntry;
      }

      @Override
      public K getKey() {
        return realEntry.getKey();
      }

      @Override
      public V getValue() {
        return valueConverter.doForward(realEntry.getValue());
      }

      @Override
      public V setValue(V value) {
        RealValue oldValue = realEntry.setValue(
            valueConverter.doBackward(value));
        if (oldValue == null) {
          return null;
        }
        return valueConverter.doForward(oldValue);
      }
    }
  }

  /**
   * Extends {@link List} to add the capability to make the list immutable and inspect if it is
   * modifiable.
   */
  public static interface ProtobufList<E> extends List<E> {

    /**
     * Makes this list immutable. All subsequent modifications will throw an
     * {@link UnsupportedOperationException}.
     */
    void makeImmutable();

    /**
     * Returns whether this list can be modified via the publicly accessible {@link List} methods.
     */
    boolean isModifiable();
  }

  /**
   * A {@link java.util.List} implementation that avoids boxing the elements into Integers if
   * possible. Does not support null elements.
   */
  public static interface IntList extends ProtobufList<Integer> {

    /**
     * Like {@link #get(int)} but more efficient in that it doesn't box the returned value.
     */
    int getInt(int index);

    /**
     * Like {@link #add(Integer)} but more efficient in that it doesn't box the element.
     */
    void addInt(int element);

    /**
     * Like {@link #set(int, Integer)} but more efficient in that it doesn't box the element.
     */
    int setInt(int index, int element);
  }

  /**
   * A {@link java.util.List} implementation that avoids boxing the elements into Booleans if
   * possible. Does not support null elements.
   */
  public static interface BooleanList extends ProtobufList<Boolean> {

    /**
     * Like {@link #get(int)} but more efficient in that it doesn't box the returned value.
     */
    boolean getBoolean(int index);

    /**
     * Like {@link #add(Boolean)} but more efficient in that it doesn't box the element.
     */
    void addBoolean(boolean element);

    /**
     * Like {@link #set(int, Boolean)} but more efficient in that it doesn't box the element.
     */
    boolean setBoolean(int index, boolean element);
  }

  /**
   * A {@link java.util.List} implementation that avoids boxing the elements into Longs if
   * possible. Does not support null elements.
   */
  public static interface LongList extends ProtobufList<Long> {

    /**
     * Like {@link #get(int)} but more efficient in that it doesn't box the returned value.
     */
    long getLong(int index);

    /**
     * Like {@link #add(Long)} but more efficient in that it doesn't box the element.
     */
    void addLong(long element);

    /**
     * Like {@link #set(int, Long)} but more efficient in that it doesn't box the element.
     */
    long setLong(int index, long element);
  }

  /**
   * A {@link java.util.List} implementation that avoids boxing the elements into Doubles if
   * possible. Does not support null elements.
   */
  public static interface DoubleList extends ProtobufList<Double> {

    /**
     * Like {@link #get(int)} but more efficient in that it doesn't box the returned value.
     */
    double getDouble(int index);

    /**
     * Like {@link #add(Double)} but more efficient in that it doesn't box the element.
     */
    void addDouble(double element);

    /**
     * Like {@link #set(int, Double)} but more efficient in that it doesn't box the element.
     */
    double setDouble(int index, double element);
  }

  /**
   * A {@link java.util.List} implementation that avoids boxing the elements into Floats if
   * possible. Does not support null elements.
   */
  public static interface FloatList extends ProtobufList<Float> {

    /**
     * Like {@link #get(int)} but more efficient in that it doesn't box the returned value.
     */
    float getFloat(int index);

    /**
     * Like {@link #add(Float)} but more efficient in that it doesn't box the element.
     */
    void addFloat(float element);

    /**
     * Like {@link #set(int, Float)} but more efficient in that it doesn't box the element.
     */
    float setFloat(int index, float element);
  }
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.typeCook;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.psi.*;
import com.intellij.psi.impl.PsiDiamondTypeUtil;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiUtil;
import com.intellij.refactoring.typeCook.deductive.PsiTypeVariableFactory;
import com.intellij.util.IncorrectOperationException;
import java.util.HashSet;
import java.util.Set;
/**
* Created by IntelliJ IDEA.
* User: db
* Date: 30.07.2003
* Time: 18:57:30
* To change this template use Options | File Templates.
*/
public class Util {
  private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.typeCook.Util");

  /**
   * Wraps {@code theType} in {@code level} array dimensions,
   * e.g. ({@code T}, 2) yields {@code T[][]}. A level of 0 returns the
   * type unchanged.
   */
  public static PsiType createArrayType(PsiType theType, int level) {
    while (level-- > 0) {
      theType = theType.createArrayType();
    }
    return theType;
  }

  /**
   * Resolves {@code type} to its class and substitutor. Anonymous classes
   * are transparently resolved through their base class type so callers
   * always see a named class.
   */
  public static PsiClassType.ClassResolveResult resolveType(PsiType type) {
    final PsiClassType.ClassResolveResult resolveResult = PsiUtil.resolveGenericsClassInType(type);
    final PsiClass aClass = resolveResult.getElement();
    if (aClass instanceof PsiAnonymousClass) {
      // Substitute the anonymous class's type arguments into its base class
      // type and resolve that instead.
      final PsiClassType baseClassType = ((PsiAnonymousClass)aClass).getBaseClassType();
      return resolveType(resolveResult.getSubstitutor().substitute(baseClassType));
    }
    return resolveResult;
  }

  /**
   * Normalizes {@code t}: recursively normalizes array components and type
   * arguments; if any type argument is Bottom (or, when {@code objectBottom}
   * is set, java.lang.Object) or unsubstituted, the whole type is replaced by
   * the raw type. Returns null when a nested normalization fails.
   *
   * @param t            the type to normalize
   * @param objectBottom when true, treat java.lang.Object arguments like Bottom
   */
  public static PsiType normalize(PsiType t, boolean objectBottom) {
    if (t instanceof PsiArrayType) {
      PsiType normType = normalize(((PsiArrayType)t).getComponentType(), objectBottom);
      return normType == null ? null : normType.createArrayType();
    }
    else if (t instanceof PsiClassType) {
      PsiClassType.ClassResolveResult result = resolveType(t);
      // NOTE(review): resolveType never returns null (it returns a
      // ClassResolveResult), so this guard looks dead; the meaningful check
      // would be result.getElement() == null — confirm before changing.
      if (result == null) {
        return null;
      }
      PsiClass aclass = result.getElement();
      PsiSubstitutor subst = result.getSubstitutor();
      PsiManager manager = aclass.getManager();
      PsiSubstitutor newbst = PsiSubstitutor.EMPTY;
      boolean anyBottom = false;
      for (PsiTypeParameter typeParameter : PsiUtil.typeParametersIterable(aclass)) {
        PsiType p = subst.substitute(typeParameter);
        if (p != null) {
          PsiType pp = normalize(p, objectBottom);
          if (pp == null) {
            return null;
          }
          if (pp == Bottom.BOTTOM || (objectBottom && pp.getCanonicalText().equals(CommonClassNames.JAVA_LANG_OBJECT))) {
            anyBottom = true;
          }
          newbst = newbst.put(typeParameter, pp);
        }
        else {
          // Unsubstituted parameter: the type is effectively raw.
          anyBottom = true;
        }
      }
      if (anyBottom || newbst == PsiSubstitutor.EMPTY) {
        // Fall back to the raw type when any argument degenerated.
        newbst = JavaPsiFacade.getInstance(manager.getProject()).getElementFactory().createRawSubstitutor(aclass);
      }
      return JavaPsiFacade.getInstance(manager.getProject()).getElementFactory().createType(aclass, newbst);
    }
    else {
      // Primitives, wildcards, etc. pass through unchanged.
      return t;
    }
  }

  /**
   * Returns whether {@code t} is (or transitively contains) a raw type,
   * according to the given type-cook {@code settings}.
   */
  public static boolean isRaw(PsiType t, final Settings settings) {
    return isRaw(t, settings, true);
  }

  /**
   * Recursive worker for {@link #isRaw(PsiType, Settings)}.
   *
   * @param upper true only for the outermost type; java.lang.Object counts as
   *              raw only at the top level (and only when cookObjects() is on)
   */
  private static boolean isRaw(PsiType t, final Settings settings, final boolean upper) {
    if (t instanceof PsiClassType) {
      final PsiClassType.ClassResolveResult resolveResult = resolveType(t);

      if (resolveResult.getElement() == null) {
        return false;
      }

      if (PsiClassType.isRaw(resolveResult)) {
        return true;
      }

      final PsiSubstitutor subst = resolveResult.getSubstitutor();
      final PsiClass element = resolveResult.getElement();
      final PsiManager manager = element.getManager();

      // Optionally treat a bare java.lang.Object at the top level as raw.
      if (settings.cookObjects() && upper &&
          t.equals(PsiType.getJavaLangObject(manager, GlobalSearchScope.allScope(manager.getProject())))) {
        return true;
      }

      // A parameterized type is raw if any of its arguments is raw.
      for (PsiTypeParameter parameter : PsiUtil.typeParametersIterable(element)) {
        final PsiType actual = subst.substitute(parameter);
        if (isRaw(actual, settings, false)) return true;
      }

      return false;
    }
    else if (t instanceof PsiArrayType) {
      return !settings.preserveRawArrays() && isRaw(((PsiArrayType)t).getComponentType(), settings, upper);
    }

    return false;
  }

  /**
   * convert external raw types to types explicitly parameterized by Bottom
   */
  public static PsiType banalize(final PsiType t) {
    if (t instanceof PsiClassType) {
      final PsiClassType.ClassResolveResult result = resolveType(t);
      final PsiClass theClass = result.getElement();

      if (theClass == null) {
        return t;
      }

      final PsiSubstitutor theSubst = result.getSubstitutor();
      final PsiManager theManager = theClass.getManager();

      PsiSubstitutor subst = PsiSubstitutor.EMPTY;

      for (final PsiTypeParameter theParm : theSubst.getSubstitutionMap().keySet()) {
        final PsiType actualType = theSubst.substitute(theParm);

        if (actualType == null /*|| actualType instanceof PsiWildcardType*/) {
          // Unsubstituted (raw) parameter: parameterize explicitly by Bottom.
          subst = subst.put(theParm, Bottom.BOTTOM);
        }
        else if (actualType instanceof PsiWildcardType) {
          final PsiWildcardType wctype = (PsiWildcardType)actualType;
          final PsiType bound = wctype.getBound();
          if (bound == null) {
            // Unbounded wildcard: keep as-is.
            subst = subst.put(theParm, actualType);
          }
          else {
            // Banalize the bound and rebuild the wildcard with the same
            // extends/super direction.
            final PsiType banabound = banalize(bound);
            subst = subst.put(theParm, wctype.isExtends()
                                       ? PsiWildcardType.createExtends(theManager, banabound)
                                       : PsiWildcardType.createSuper(theManager, banabound));
          }
        }
        else {
          final PsiType banType = banalize(actualType);

          if (banType == null) {
            return t;
          }

          subst = subst.put(theParm, banType);
        }
      }

      return JavaPsiFacade.getInstance(theManager.getProject()).getElementFactory().createType(theClass, subst);
    }
    else if (t instanceof PsiArrayType) {
      return banalize(((PsiArrayType)t).getComponentType()).createArrayType();
    }

    return t;
  }

  /**
   * Returns the composition f ∘ g as a substitutor: each parameter of
   * {@code g} maps to f(g(parameter)). If {@code f} is empty the result is
   * simply {@code g}.
   */
  public static PsiSubstitutor composeSubstitutors(PsiSubstitutor f, PsiSubstitutor g) {
    if (f == PsiSubstitutor.EMPTY) {
      return g;
    }

    PsiSubstitutor subst = PsiSubstitutor.EMPTY;
    Set<PsiTypeParameter> base = g.getSubstitutionMap().keySet();

    for (PsiTypeParameter p : base) {
      PsiType type = g.substitute(p);
      subst = subst.put(p, type == null ? null : f.substitute(type));
    }

    return subst;
  }

  /**
   * Returns whether {@code t} references any type parameter from
   * {@code params} (or any type parameter at all when {@code params} is
   * null). Extends-wildcards are examined through their bound; super-wildcards
   * are not.
   */
  public static boolean bindsTypeParameters(PsiType t, Set<PsiTypeParameter> params) {
    if (t instanceof PsiWildcardType) {
      final PsiWildcardType wct = ((PsiWildcardType)t);
      final PsiType bound = wct.getBound();

      return bound != null && wct.isExtends() && bindsTypeParameters(bound, params);
    }

    final PsiClassType.ClassResolveResult result = resolveType(t);
    final PsiClass theClass = result.getElement();
    final PsiSubstitutor theSubst = result.getSubstitutor();

    if (theClass == null) {
      return false;
    }

    if (theClass instanceof PsiTypeParameter) {
      return params == null || params.contains(theClass);
    }
    else if (theClass.hasTypeParameters()) {
      // Recurse into the actual type arguments.
      for (PsiTypeParameter parameter : PsiUtil.typeParametersIterable(theClass)) {
        PsiType bound = theSubst.substitute(parameter);

        if (bound != null && bindsTypeParameters(bound, params)) {
          return true;
        }
      }
    }

    return false;
  }

  /**
   * Returns the declared/expression/return type of the given element, or null
   * for elements that have no notion of a type here.
   */
  public static PsiType getType(PsiElement element) {
    if (element instanceof PsiVariable) {
      return ((PsiVariable)element).getType();
    }
    else if (element instanceof PsiExpression) {
      return ((PsiExpression)element).getType();
    }
    else if (element instanceof PsiMethod) {
      return ((PsiMethod)element).getReturnType();
    }

    return null;
  }

  /** Parameterizes {@code t} with fresh type variables; see the private worker. */
  public static PsiType createParameterizedType(final PsiType t, final PsiTypeVariableFactory factory, final PsiElement context) {
    return createParameterizedType(t, factory, true, context);
  }

  /** Convenience overload without a context element. */
  public static PsiType createParameterizedType(final PsiType t, final PsiTypeVariableFactory factory) {
    return createParameterizedType(t, factory, true, null);
  }

  /**
   * Replaces the type arguments of {@code t} (recursively, including array
   * components) with fresh type variables from {@code factory}. A null type
   * or a top-level java.lang.Object becomes a single fresh variable. When a
   * type gets more than one fresh variable they are registered as a cluster
   * so the solver can relate them.
   */
  private static PsiType createParameterizedType(final PsiType t,
                                                 final PsiTypeVariableFactory factory,
                                                 final boolean upper,
                                                 final PsiElement context) {
    if (t == null || (upper && t.getCanonicalText().equals(CommonClassNames.JAVA_LANG_OBJECT))) {
      return factory.create(context);
    }

    if (t instanceof PsiClassType) {
      final PsiClassType.ClassResolveResult result = resolveType(t);
      final PsiSubstitutor aSubst = result.getSubstitutor();
      final PsiClass aClass = result.getElement();

      PsiSubstitutor theSubst = PsiSubstitutor.EMPTY;

      final Set<PsiTypeVariable> cluster = new HashSet<PsiTypeVariable>();

      for (final PsiTypeParameter parm : aSubst.getSubstitutionMap().keySet()) {
        final PsiType type = createParameterizedType(aSubst.substitute(parm), factory, false, context);

        if (type instanceof PsiTypeVariable) {
          cluster.add((PsiTypeVariable)type);
        }

        theSubst = theSubst.put(parm, type);
      }

      if (cluster.size() > 1) {
        factory.registerCluster(cluster);
      }

      return JavaPsiFacade.getInstance(aClass.getProject()).getElementFactory().createType(aClass, theSubst);
    }
    else if (t instanceof PsiArrayType) {
      return createParameterizedType(((PsiArrayType)t).getComponentType(), factory, upper, context).createArrayType();
    }

    return t;
  }

  /**
   * Returns whether {@code t} contains a {@link PsiTypeVariable} anywhere:
   * directly, in array components, wildcard bounds, intersection conjuncts,
   * or type arguments.
   */
  public static boolean bindsTypeVariables(final PsiType t) {
    if (t == null) {
      return false;
    }

    if (t instanceof PsiTypeVariable) {
      return true;
    }

    if (t instanceof PsiArrayType) {
      return bindsTypeVariables(((PsiArrayType)t).getComponentType());
    }

    if (t instanceof PsiWildcardType) {
      return bindsTypeVariables(((PsiWildcardType)t).getBound());
    }

    if (t instanceof PsiIntersectionType) {
      final PsiType[] conjuncts = ((PsiIntersectionType)t).getConjuncts();
      for (PsiType conjunct : conjuncts) {
        if (bindsTypeVariables(conjunct)) return true;
      }

      return false;
    }

    final PsiClassType.ClassResolveResult result = resolveType(t);

    if (result.getElement() != null) {
      final PsiSubstitutor subst = result.getSubstitutor();

      for (final PsiType psiType : subst.getSubstitutionMap().values()) {
        if (bindsTypeVariables(psiType)) {
          return true;
        }
      }
    }

    return false;
  }

  /**
   * Rewrites the declared type of {@code element} to {@code type} in the
   * PSI tree: cast types, variable declarations, method return types, and
   * the type-argument list of {@code new} expressions (collapsing to the
   * diamond where possible).
   */
  public static void changeType(final PsiElement element, final PsiType type) {
    try {
      if (element instanceof PsiTypeCastExpression) {
        final PsiTypeCastExpression cast = ((PsiTypeCastExpression)element);

        cast.getCastType().replace(JavaPsiFacade.getInstance(cast.getProject()).getElementFactory().createTypeElement(type));
      }
      else if (element instanceof PsiVariable) {
        final PsiVariable field = ((PsiVariable)element);

        // Split multi-variable declarations first so only this variable's
        // type element is replaced.
        field.normalizeDeclaration();
        field.getTypeElement().replace(JavaPsiFacade.getInstance(field.getProject()).getElementFactory().createTypeElement(type));
      }
      else if (element instanceof PsiMethod) {
        final PsiMethod method = ((PsiMethod)element);

        method.getReturnTypeElement().replace(JavaPsiFacade.getInstance(method.getProject()).getElementFactory().createTypeElement(type));
      }
      else if (element instanceof PsiNewExpression) {
        final PsiNewExpression newx = (PsiNewExpression)element;
        final PsiClassType.ClassResolveResult result = resolveType(type);

        // NOTE(review): resolveType never returns null; the intended guard is
        // probably result.getElement() == null — confirm before changing.
        if (result == null) {
          return;
        }

        final PsiSubstitutor subst = result.getSubstitutor();
        final PsiTypeParameter[] parms = result.getElement().getTypeParameters();

        if (parms.length > 0 && subst.substitute(parms[0]) != null) {
          PsiJavaCodeReferenceElement classReference = newx.getClassOrAnonymousClassReference();
          PsiReferenceParameterList list = classReference.getParameterList();
          if (list == null) {
            return;
          }
          final PsiElementFactory factory = JavaPsiFacade.getInstance(newx.getProject()).getElementFactory();

          // Drop the old explicit type arguments...
          PsiTypeElement[] elements = list.getTypeParameterElements();
          for (PsiTypeElement element1 : elements) {
            element1.delete();
          }

          // ...and add one argument per class type parameter, unwrapping
          // wildcards to their bound (Object when unbounded).
          for (PsiTypeParameter parm : parms) {
            PsiType aType = subst.substitute(parm);

            if (aType instanceof PsiWildcardType) {
              aType = ((PsiWildcardType)aType).getBound();
            }

            list
              .add(factory.createTypeElement(aType == null ? PsiType.getJavaLangObject(list.getManager(), list.getResolveScope()) : aType));
          }
          if (PsiDiamondTypeUtil.canCollapseToDiamond(newx, newx, newx.getType())) {
            PsiDiamondTypeUtil.replaceExplicitWithDiamond(list);
          }
        }
      }
      else {
        LOG.error("Unexpected element type " + element.getClass().getName());
      }
    }
    catch (IncorrectOperationException e) {
      // NOTE(review): the caught exception is discarded — the logged message
      // carries no detail (and mentions "CastRole" regardless of the branch
      // taken). Consider logging LOG.error(e) instead.
      LOG.error("Incorrect Operation Exception thrown in CastRole.\n");
    }
  }
}
| |
package com.codepix.main;
import twitter4j.Twitter;
import twitter4j.User;
import twitter4j.auth.AccessToken;
import twitter4j.auth.RequestToken;
import com.codepix.utilz.GlobalMethods;
import com.facebook.Session;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.os.StrictMode;
import android.provider.MediaStore.MediaColumns;
import android.support.v4.app.FragmentActivity;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageButton;
import android.widget.TabHost;
public class DashboardActivity extends FragmentActivity {
/**
 * Builds one tab widget for the dashboard's TabHost: inflates the custom tab
 * layout and sets the drawable with resource id {@code id} as the background
 * of the tab's ImageButton.
 */
private static View createTabView(Context context, int id) {
    final View tabView = LayoutInflater.from(context).inflate(R.layout.custom_tab, null, false);
    final ImageButton tabButton = (ImageButton) tabView.findViewById(R.id.tabTitleText);
    tabButton.setFocusable(true);
    tabView.setFocusable(true);
    tabButton.setBackgroundDrawable(context.getResources().getDrawable(id));
    return tabView;
}
// Fragment TabHost as mTabHost
private TabHost tHost;
// One fragment instance per dashboard tab.
private HomeFragment homeFragment;
private CameraFragment cameraFragment;
private ExploreFragment exploreFragment;
private SettingsFragment settingsFragment;
// SECURITY NOTE(review): OAuth consumer key/secret are hard-coded in source.
// Anyone decompiling the APK can read them; consider moving them out of the
// source tree (build config / server-side exchange).
static String TWITTER_CONSUMER_KEY = "BeN61RJOYUbuQIdFqdZYA"; // place your cosumer key here
static String TWITTER_CONSUMER_SECRET = "CpIPrD5l7t7tB54WSmNHfxE0tGEMkM1HABimt7IyE"; // place your consumer secret here
// Preference Constants
static String PREFERENCE_NAME = "twitter_oauth";
static final String PREF_KEY_OAUTH_TOKEN = "oauth_token";
static final String PREF_KEY_OAUTH_SECRET = "oauth_token_secret";
static final String PREF_KEY_TWITTER_LOGIN = "isTwitterLogedIn";
// Custom-scheme callback URL Twitter redirects to after authorization.
static final String TWITTER_CALLBACK_URL = "codepixconnect://twitter_connect";
// Twitter oauth urls
static final String URL_TWITTER_AUTH = "auth_url";
static final String URL_TWITTER_OAUTH_VERIFIER = "oauth_verifier";
static final String URL_TWITTER_OAUTH_TOKEN = "oauth_token";
// Activity-result / request codes.
private static final int TWITTER_AUTH = 103;
protected static final int SELECT_PHOTO_FROM_CAMERA = 1010;
protected static final int SELECT_PHOTO = 1000;
// NOTE(review): static mutable Twitter/session state is shared across all
// instances of this activity — confirm that is intended.
static Twitter twitter;
static RequestToken requestToken;
static AccessToken accessToken;
private SharedPreferences mSharedPreferences;
// Path of the most recently selected image, if any.
private String filePath;
static String verifier;
private void doHandleImage()
{
CharSequence[] items={"Take Picture","Select From Gallery"};
new AlertDialog.Builder(this)
.setSingleChoiceItems(items, 0, null)
.setPositiveButton("OK", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int whichButton) {
dialog.dismiss();
int selectedPosition = ((AlertDialog)dialog).getListView().getCheckedItemPosition();
// Do something useful withe the position of the selected radio button
if(selectedPosition==0)
{
Intent photoPickerIntent = new Intent(DashboardActivity.this,ImageEffectsActivity.class);
startActivity(photoPickerIntent);
//startActivityForResult(photoPickerIntent, SELECT_PHOTO_FROM_CAMERA);
}
else if(selectedPosition==1)
{
Intent photoPickerIntent1 = new Intent(Intent.ACTION_PICK);
photoPickerIntent1.setType("image/*");
startActivityForResult(photoPickerIntent1, SELECT_PHOTO);
}
}
})
.setNegativeButton("CANCEL", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int whichButton) {
dialog.dismiss();
}
})
.show();
}
public String getPath(Uri uri) {
String[] projection = { MediaColumns.DATA };
Cursor cursor = managedQuery(uri, projection, null, null, null);
int column_index = cursor
.getColumnIndexOrThrow(MediaColumns.DATA);
cursor.moveToFirst();
return cursor.getString(column_index);
}
private void getTwitterAccessToken(Intent intent) {
// TODO Auto-generated method stub
System.out.println("test1");
if (!isTwitterLoggedInAlready()) {
//Uri uri =intent.getData();
//System.out.println("test2"+uri.toString());
// if (uri != null && uri.toString().startsWith(TWITTER_CALLBACK_URL)) {
// oAuth verifier
verifier = intent.getExtras().getString("oauth_verifier");
try {
Thread thread = new Thread(new Runnable(){
@Override
public void run() {
try {
System.out.println("test3");
// Get the access token
DashboardActivity.accessToken = twitter.getOAuthAccessToken(
requestToken, verifier);
// Shared Preferences
Editor e = mSharedPreferences.edit();
// After getting access token, access token secret
// store them in application preferences
e.putString(PREF_KEY_OAUTH_TOKEN, accessToken.getToken());
e.putString(PREF_KEY_OAUTH_SECRET,
accessToken.getTokenSecret());
// Store login status - true
e.putBoolean(PREF_KEY_TWITTER_LOGIN, true);
e.commit(); // save changes
System.out.println("test4");
Log.e("Twitter OAuth Token", "> " + accessToken.getToken());
// Getting user details from twitter
// For now i am getting his name only
long userID = accessToken.getUserId();
User user = twitter.showUser(userID);
String username = user.getName();
String profileimage=user.getProfileImageURL();
String name=user.getScreenName();
System.out.println( "username: " + username + "\n profileimage: " + profileimage+"\nname:-"+name);
} catch (Exception e) {
e.printStackTrace();
}
}
});
thread.start();
//Toast.makeText(getApplicationContext(), "username: " + username + "\n profileimage: " + profileimage+"\nname:-"+name, Toast.LENGTH_LONG).show();
} catch (Exception e) {
// Check log for login errors
Log.e("Twitter Login Error", "> " + e.getMessage());
e.printStackTrace();
}
}
}
private boolean isTwitterLoggedInAlready() {
// return twitter login status from Shared Preferences
return mSharedPreferences.getBoolean(PREF_KEY_TWITTER_LOGIN, false);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
switch(requestCode) {
case TWITTER_AUTH:
if (resultCode == Activity.RESULT_OK)
{
String oauthVerifier = (String) data.getExtras().get("oauth_verifier");
System.out.println("oauthVerifier:-"+oauthVerifier);
getTwitterAccessToken(data);
}
break;
case SELECT_PHOTO:
if(resultCode ==RESULT_OK){
Uri selectedImage = data.getData();
filePath= getPath(selectedImage);
Intent intent=new Intent(DashboardActivity.this,ImageEffectsActivity.class);
intent.putExtra("filePath", filePath);
intent.putExtra("pictureFromCamera", false);
startActivity(intent);
finish();
// Toast.makeText(getActivity(), "File://"+filePath, Toast.LENGTH_LONG).show();
/*InputStream imageStream = null;
try {
imageStream =getContentResolver().openInputStream(selectedImage);
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
Bitmap yourSelectedImage = BitmapFactory.decodeStream(imageStream);*/
/*imagView.setImageBitmap(yourSelectedImage);
RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(200,200);
params.addRule(RelativeLayout.ALIGN_PARENT_LEFT,1);
imagView.setLayoutParams(params);*/
}
break;
case SELECT_PHOTO_FROM_CAMERA:
//Toast.makeText(this, "file"+filePath, Toast.LENGTH_LONG).show();
if(resultCode ==RESULT_OK){
filePath=data.getStringExtra("filepath");
Intent intent=new Intent(DashboardActivity.this,ImageEffectsActivity.class);
intent.putExtra("filePath", filePath);
intent.putExtra("pictureFromCamera", true);
startActivity(intent);
finish();
/* imagView.setImageURI(Uri.fromFile(new File(filePath)));
RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(200,200);
params.addRule(RelativeLayout.ALIGN_PARENT_LEFT,1);
imagView.setLayoutParams(params);
imagView.setRotation(90);*/
}
break;
default:
if(GlobalMethods.checkInternetConnection(getApplicationContext()))
{
Session.getActiveSession().onActivityResult(this, requestCode, resultCode, data);
}
else
{
GlobalMethods.showMessage(getApplicationContext(), getString(R.string.internet_error));
}
break;
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
/* StrictMode.setVmPolicy(new StrictMode.VmPolicy.Builder()
.detectAll()
.penaltyLog()
.penaltyDeath()
.build()); */
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_dashboard);
tHost = (TabHost) findViewById(android.R.id.tabhost);
tHost.setup();
mSharedPreferences = getApplicationContext().getSharedPreferences(
"MyPref", 0);
TabHost.OnTabChangeListener tabChangeListener = new TabHost.OnTabChangeListener() {
@Override
public void onTabChanged(String tabId) {
//System.out.println("tabId"+tabId);
android.support.v4.app.FragmentManager fm = getSupportFragmentManager();
homeFragment = (HomeFragment) fm.findFragmentByTag("home");
//cameraFragment=(CameraFragment) fm.findFragmentByTag("camera");
exploreFragment=(ExploreFragment) fm.findFragmentByTag("explore");
settingsFragment=(SettingsFragment) fm.findFragmentByTag("settings");
android.support.v4.app.FragmentTransaction ft = fm.beginTransaction();
//ft.addToBackStack(null);
/** Detaches the homeFragment if exists */
if(homeFragment!=null)
ft.detach(homeFragment);
// if(cameraFragment!=null)
// ft.detach(homeFragment);
if(exploreFragment!=null)
ft.detach(exploreFragment);
if(settingsFragment!=null)
ft.detach(settingsFragment);
// GlobalMethods.showMessage(getApplicationContext(), "current tab"+tabId);
if(tabId.equalsIgnoreCase("home")){
if(homeFragment==null){
// *//** Create AndroidFragment and adding to fragmenttransaction *//*
ft.add(R.id.realtabcontent,new HomeFragment(), "home");
}else{
// *//** Bring to the front, if already exists in the fragmenttransaction *//*
ft.attach(homeFragment);
//ft.replace(R.id.realtabcontent, Fragment.instantiate(DashboardFragmentActivity.this, homeFragment.getClass().getName()));
//ft.replace(R.id.realtabcontent,homeFragment);
}
}
if(tabId.equalsIgnoreCase("camera")){
doHandleImage();
/*if(cameraFragment==null){
// *//** Create AndroidFragment and adding to fragmenttransaction *//*
ft.add(R.id.realtabcontent,new CameraFragment(), "camera");
}else{
// *//** Bring to the front, if already exists in the fragmenttransaction *//*
// ft.attach(cameraFragment);
ft.replace(R.id.realtabcontent,cameraFragment);
//ft.replace(R.id.realtabcontent, Fragment.instantiate(DashboardFragmentActivity.this, homeFragment.getClass().getName()));
}*/
}
if(tabId.equalsIgnoreCase("explore")==true){
System.out.println("Explore true");
if(exploreFragment==null){
// /** Create AndroidFragment and adding to fragmenttransaction */
ft.add(R.id.realtabcontent,new ExploreFragment(), "explore");
}else{
// /** Bring to the front, if already exists in the fragmenttransaction */
ft.attach(exploreFragment);
//ft.replace(R.id.realtabcontent,exploreFragment);
//ft.replace(R.id.realtabcontent, Fragment.instantiate(DashboardFragmentActivity.this, homeFragment.getClass().getName()));
}
}
if(tabId.equalsIgnoreCase("settings")){
System.out.println("Explore false");
if(settingsFragment==null){
// *//** Create AndroidFragment and adding to fragmenttransaction *//*
ft.add(R.id.realtabcontent,new SettingsFragment(), "settings");
}else{
// *//** Bring to the front, if already exists in the fragmenttransaction *//*
ft.attach(settingsFragment);
// ft.replace(R.id.realtabcontent,settingsFragment);
//ft.replace(R.id.realtabcontent, Fragment.instantiate(DashboardFragmentActivity.this, homeFragment.getClass().getName()));
}
}
// ft.commitAllowingStateLoss();
ft.commit();
}
};
/** Setting tabchangelistener for the tab */
tHost.setOnTabChangedListener(tabChangeListener);
View tabView = createTabView(this, R.drawable.icon_logo_config);
/** Defining tab builder for home tab */
TabHost.TabSpec tSpechome = tHost.newTabSpec("logo");
tSpechome.setIndicator(tabView);
tSpechome.setContent(new DummyTabContent(getBaseContext()));
tHost.addTab(tSpechome);
/** Defining tab builder for intro tab */
TabHost.TabSpec tSpecintro = tHost.newTabSpec("home");
tabView = createTabView(this, R.drawable.icon_home_config);
tSpecintro.setIndicator(tabView);
tSpecintro.setContent(new DummyTabContent(getBaseContext()));
tHost.addTab(tSpecintro);
/** Defining tab builder for updates tab */
TabHost.TabSpec tSpecupdates = tHost.newTabSpec("camera");
tabView = createTabView(this, R.drawable.icon_snap_config);
tSpecupdates.setIndicator(tabView);
tSpecupdates.setContent(new DummyTabContent(getBaseContext()));
tHost.addTab(tSpecupdates);
/** Defining tab builder for history tab */
TabHost.TabSpec tSpechistory = tHost.newTabSpec("explore");
tabView = createTabView(this, R.drawable.icon_explore_config);
tSpechistory.setIndicator(tabView);
tSpechistory.setContent(new DummyTabContent(getBaseContext()));
tHost.addTab(tSpechistory);
/** Defining tab builder for settings tab */
TabHost.TabSpec tSpecsettings = tHost.newTabSpec("settings");
tabView = createTabView(this, R.drawable.icon_settings_config);
tSpecsettings.setIndicator(tabView);
tSpecsettings.setContent(new DummyTabContent(getBaseContext()));
tHost.addTab(tSpecsettings);
/* *//** Set tab when select from Menubar *//*
Intent intent=getIntent();
String tab=intent.getStringExtra("tab");
if(tab!=null)
tHost.setCurrentTabByTag(tab);
*/
tHost.setCurrentTabByTag("home");
// Intent i = new Intent("com.codepix.main.CodepixService");
// startService(i);
}
@Override
protected void onDestroy() {
super.onDestroy();
finish();
unbindDrawables(findViewById(R.id.rootView));
System.gc();
}
@Override
protected void onNewIntent(Intent intent) {
super.onNewIntent(intent);
System.out.println("called onNewIntent");
Uri uri =intent.getData();
if (uri != null && uri.toString().startsWith(TWITTER_CALLBACK_URL))
getTwitterAccessToken(intent);
}
@Override
public void onRestoreInstanceState(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onRestoreInstanceState(savedInstanceState);
String tHostName=savedInstanceState.getString("tHost");
tHost.setCurrentTabByTag(tHostName);
}
@Override
public void onSaveInstanceState(Bundle outState) {
// TODO Auto-generated method stub
super.onSaveInstanceState(outState);
outState.putString("tHost", tHost.getCurrentTabTag());
}
private void unbindDrawables(View view) {
if (view.getBackground() != null) {
view.getBackground().setCallback(null);
}
if (view instanceof ViewGroup) {
for (int i = 0; i < ((ViewGroup) view).getChildCount(); i++) {
unbindDrawables(((ViewGroup) view).getChildAt(i));
}
((ViewGroup) view).removeAllViews();
}
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.server.remotetask;
import com.google.common.base.Ticker;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Multimap;
import com.google.common.collect.SetMultimap;
import com.google.common.net.HttpHeaders;
import com.google.common.net.MediaType;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import io.airlift.concurrent.SetThreadName;
import io.airlift.http.client.FullJsonResponseHandler.JsonResponse;
import io.airlift.http.client.HttpClient;
import io.airlift.http.client.HttpUriBuilder;
import io.airlift.http.client.Request;
import io.airlift.json.JsonCodec;
import io.airlift.log.Logger;
import io.airlift.units.Duration;
import io.trino.Session;
import io.trino.execution.DynamicFiltersCollector.VersionedDynamicFilterDomains;
import io.trino.execution.FutureStateChange;
import io.trino.execution.Lifespan;
import io.trino.execution.NodeTaskMap.PartitionedSplitCountTracker;
import io.trino.execution.RemoteTask;
import io.trino.execution.ScheduledSplit;
import io.trino.execution.StateMachine.StateChangeListener;
import io.trino.execution.TaskId;
import io.trino.execution.TaskInfo;
import io.trino.execution.TaskSource;
import io.trino.execution.TaskState;
import io.trino.execution.TaskStatus;
import io.trino.execution.buffer.BufferInfo;
import io.trino.execution.buffer.OutputBuffers;
import io.trino.execution.buffer.PageBufferInfo;
import io.trino.metadata.Split;
import io.trino.operator.TaskStats;
import io.trino.server.DynamicFilterService;
import io.trino.server.TaskUpdateRequest;
import io.trino.sql.planner.PlanFragment;
import io.trino.sql.planner.plan.PlanNode;
import io.trino.sql.planner.plan.PlanNodeId;
import org.joda.time.DateTime;
import javax.annotation.concurrent.GuardedBy;
import java.net.URI;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Stream;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.util.concurrent.Futures.immediateFuture;
import static io.airlift.http.client.FullJsonResponseHandler.createFullJsonResponseHandler;
import static io.airlift.http.client.HttpUriBuilder.uriBuilderFrom;
import static io.airlift.http.client.Request.Builder.prepareDelete;
import static io.airlift.http.client.Request.Builder.preparePost;
import static io.airlift.http.client.StaticBodyGenerator.createStaticBodyGenerator;
import static io.trino.SystemSessionProperties.getMaxUnacknowledgedSplitsPerTask;
import static io.trino.execution.TaskInfo.createInitialTask;
import static io.trino.execution.TaskState.ABORTED;
import static io.trino.execution.TaskState.FAILED;
import static io.trino.execution.TaskStatus.failWith;
import static io.trino.server.remotetask.RequestErrorTracker.logError;
import static io.trino.util.Failures.toFailure;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
public final class HttpRemoteTask
implements RemoteTask
{
    private static final Logger log = Logger.get(HttpRemoteTask.class);

    private final TaskId taskId;
    private final Session session;
    // Identifier of the worker node this task is assigned to.
    private final String nodeId;
    private final PlanFragment planFragment;
    private final OptionalInt totalPartitions;
    // Monotonically increasing id assigned to each ScheduledSplit.
    private final AtomicLong nextSplitId = new AtomicLong();
    private final RemoteTaskStats stats;
    // Background fetchers that poll the worker for info / status / dynamic filters.
    private final TaskInfoFetcher taskInfoFetcher;
    private final ContinuousTaskStatusFetcher taskStatusFetcher;
    private final DynamicFiltersFetcher dynamicFiltersFetcher;
    // In-flight task update request, if any; at most one at a time (see sendUpdate()).
    @GuardedBy("this")
    private Future<?> currentRequest;
    @GuardedBy("this")
    private long currentRequestStartNanos;
    // Splits queued locally but not yet acknowledged by the worker.
    @GuardedBy("this")
    private final SetMultimap<PlanNodeId, ScheduledSplit> pendingSplits = HashMultimap.create();
    private final int maxUnacknowledgedSplits;
    // volatile so getPendingSourceSplitCount() can read it without the instance lock;
    // writes still happen under the lock.
    @GuardedBy("this")
    private volatile int pendingSourceSplitCount;
    // Per-lifespan "no more splits" notifications not yet delivered to the worker.
    @GuardedBy("this")
    private final SetMultimap<PlanNodeId, Lifespan> pendingNoMoreSplitsForLifespan = HashMultimap.create();
    @GuardedBy("this")
    // The keys of this map represent all plan nodes that have "no more splits".
    // The boolean value of each entry represents whether the "no more splits" notification is pending delivery to workers.
    private final Map<PlanNodeId, Boolean> noMoreSplits = new HashMap<>();
    @GuardedBy("this")
    private final AtomicReference<OutputBuffers> outputBuffers = new AtomicReference<>();
    // Completed whenever the split queue drops back below its thresholds.
    private final FutureStateChange<?> whenSplitQueueHasSpace = new FutureStateChange<>();
    @GuardedBy("this")
    private boolean splitQueueHasSpace = true;
    @GuardedBy("this")
    private OptionalInt whenSplitQueueHasSpaceThreshold = OptionalInt.empty();
    private final boolean summarizeTaskInfo;
    private final HttpClient httpClient;
    private final Executor executor;
    private final ScheduledExecutorService errorScheduledExecutor;
    private final JsonCodec<TaskInfo> taskInfoCodec;
    private final JsonCodec<TaskUpdateRequest> taskUpdateRequestCodec;
    private final RequestErrorTracker updateErrorTracker;
    // True when local state has changed and the worker needs a task update.
    private final AtomicBoolean needsUpdate = new AtomicBoolean(true);
    // True until the plan fragment has been delivered to the worker.
    private final AtomicBoolean sendPlan = new AtomicBoolean(true);
    private final PartitionedSplitCountTracker partitionedSplitCountTracker;
    private final AtomicBoolean started = new AtomicBoolean(false);
    private final AtomicBoolean aborting = new AtomicBoolean(false);
    /**
     * Creates a handle for the remote task {@code taskId} running on worker
     * {@code nodeId} at {@code location}. Registers the initial splits,
     * builds the initial {@link TaskInfo}, and wires up the status, info and
     * dynamic-filter fetchers. The worker is not contacted until
     * {@link #start()} is called.
     */
    public HttpRemoteTask(
            Session session,
            TaskId taskId,
            String nodeId,
            URI location,
            PlanFragment planFragment,
            Multimap<PlanNodeId, Split> initialSplits,
            OptionalInt totalPartitions,
            OutputBuffers outputBuffers,
            HttpClient httpClient,
            Executor executor,
            ScheduledExecutorService updateScheduledExecutor,
            ScheduledExecutorService errorScheduledExecutor,
            Duration maxErrorDuration,
            Duration taskStatusRefreshMaxWait,
            Duration taskInfoUpdateInterval,
            boolean summarizeTaskInfo,
            JsonCodec<TaskStatus> taskStatusCodec,
            JsonCodec<VersionedDynamicFilterDomains> dynamicFilterDomainsCodec,
            JsonCodec<TaskInfo> taskInfoCodec,
            JsonCodec<TaskUpdateRequest> taskUpdateRequestCodec,
            PartitionedSplitCountTracker partitionedSplitCountTracker,
            RemoteTaskStats stats,
            DynamicFilterService dynamicFilterService)
    {
        requireNonNull(session, "session is null");
        requireNonNull(taskId, "taskId is null");
        requireNonNull(nodeId, "nodeId is null");
        requireNonNull(location, "location is null");
        requireNonNull(planFragment, "planFragment is null");
        requireNonNull(totalPartitions, "totalPartitions is null");
        requireNonNull(outputBuffers, "outputBuffers is null");
        requireNonNull(httpClient, "httpClient is null");
        requireNonNull(executor, "executor is null");
        requireNonNull(taskStatusCodec, "taskStatusCodec is null");
        requireNonNull(taskInfoCodec, "taskInfoCodec is null");
        requireNonNull(taskUpdateRequestCodec, "taskUpdateRequestCodec is null");
        requireNonNull(partitionedSplitCountTracker, "partitionedSplitCountTracker is null");
        requireNonNull(stats, "stats is null");
        try (SetThreadName ignored = new SetThreadName("HttpRemoteTask-%s", taskId)) {
            this.taskId = taskId;
            this.session = session;
            this.nodeId = nodeId;
            this.planFragment = planFragment;
            this.totalPartitions = totalPartitions;
            this.outputBuffers.set(outputBuffers);
            this.httpClient = httpClient;
            this.executor = executor;
            this.errorScheduledExecutor = errorScheduledExecutor;
            this.summarizeTaskInfo = summarizeTaskInfo;
            this.taskInfoCodec = taskInfoCodec;
            this.taskUpdateRequestCodec = taskUpdateRequestCodec;
            this.updateErrorTracker = new RequestErrorTracker(taskId, location, maxErrorDuration, errorScheduledExecutor, "updating task");
            this.partitionedSplitCountTracker = requireNonNull(partitionedSplitCountTracker, "partitionedSplitCountTracker is null");
            this.stats = stats;
            // Queue the initial splits; each gets a unique monotonically increasing id.
            for (Entry<PlanNodeId, Split> entry : requireNonNull(initialSplits, "initialSplits is null").entries()) {
                ScheduledSplit scheduledSplit = new ScheduledSplit(nextSplitId.getAndIncrement(), entry.getKey(), entry.getValue());
                pendingSplits.put(entry.getKey(), scheduledSplit);
            }
            // Only splits for partitioned sources count toward the queue thresholds.
            pendingSourceSplitCount = planFragment.getPartitionedSources().stream()
                    .filter(initialSplits::containsKey)
                    .mapToInt(partitionedSource -> initialSplits.get(partitionedSource).size())
                    .sum();
            maxUnacknowledgedSplits = getMaxUnacknowledgedSplitsPerTask(session);
            // Build empty buffer states for the initial (not yet started) TaskInfo.
            List<BufferInfo> bufferStates = outputBuffers.getBuffers()
                    .keySet().stream()
                    .map(outputId -> new BufferInfo(outputId, false, 0, 0, PageBufferInfo.empty()))
                    .collect(toImmutableList());
            TaskInfo initialTask = createInitialTask(taskId, location, nodeId, bufferStates, new TaskStats(DateTime.now(), null));
            this.dynamicFiltersFetcher = new DynamicFiltersFetcher(
                    this::failTask,
                    taskId,
                    location,
                    taskStatusRefreshMaxWait,
                    dynamicFilterDomainsCodec,
                    executor,
                    httpClient,
                    maxErrorDuration,
                    errorScheduledExecutor,
                    stats,
                    dynamicFilterService);
            this.taskStatusFetcher = new ContinuousTaskStatusFetcher(
                    this::failTask,
                    initialTask.getTaskStatus(),
                    taskStatusRefreshMaxWait,
                    taskStatusCodec,
                    dynamicFiltersFetcher,
                    executor,
                    httpClient,
                    maxErrorDuration,
                    errorScheduledExecutor,
                    stats);
            this.taskInfoFetcher = new TaskInfoFetcher(
                    this::failTask,
                    initialTask,
                    httpClient,
                    taskInfoUpdateInterval,
                    taskInfoCodec,
                    maxErrorDuration,
                    summarizeTaskInfo,
                    executor,
                    updateScheduledExecutor,
                    errorScheduledExecutor,
                    stats);
            // Keep the node-level split tracker and queue-space flag in sync
            // with every observed status change; clean up on terminal states.
            taskStatusFetcher.addStateChangeListener(newStatus -> {
                TaskState state = newStatus.getState();
                if (state.isDone()) {
                    cleanUpTask();
                }
                else {
                    partitionedSplitCountTracker.setPartitionedSplitCount(getPartitionedSplitCount());
                    updateSplitQueueSpace();
                }
            });
            partitionedSplitCountTracker.setPartitionedSplitCount(getPartitionedSplitCount());
            updateSplitQueueSpace();
        }
    }
@Override
public TaskId getTaskId()
{
return taskId;
}
@Override
public String getNodeId()
{
return nodeId;
}
@Override
public TaskInfo getTaskInfo()
{
return taskInfoFetcher.getTaskInfo();
}
@Override
public TaskStatus getTaskStatus()
{
return taskStatusFetcher.getTaskStatus();
}
    /**
     * Starts the remote task: marks it started, triggers the first update
     * (which delivers the plan and initial splits via sendUpdate), and starts
     * the background fetchers.
     */
    @Override
    public void start()
    {
        try (SetThreadName ignored = new SetThreadName("HttpRemoteTask-%s", taskId)) {
            // to start we just need to trigger an update
            started.set(true);
            scheduleUpdate();
            dynamicFiltersFetcher.start();
            taskStatusFetcher.start();
            taskInfoFetcher.start();
        }
    }
    /**
     * Queues new splits for delivery to the worker, updates the partitioned
     * split accounting, and schedules a task update if anything was added.
     * Fails if splits arrive for a source already marked "no more splits".
     */
    @Override
    public synchronized void addSplits(Multimap<PlanNodeId, Split> splitsBySource)
    {
        requireNonNull(splitsBySource, "splitsBySource is null");
        // only add pending split if not done
        if (getTaskStatus().getState().isDone()) {
            return;
        }
        boolean needsUpdate = false;
        for (Entry<PlanNodeId, Collection<Split>> entry : splitsBySource.asMap().entrySet()) {
            PlanNodeId sourceId = entry.getKey();
            Collection<Split> splits = entry.getValue();
            checkState(!noMoreSplits.containsKey(sourceId), "noMoreSplits has already been set for %s", sourceId);
            int added = 0;
            // pendingSplits is a set multimap: put() returns false for duplicates.
            for (Split split : splits) {
                if (pendingSplits.put(sourceId, new ScheduledSplit(nextSplitId.getAndIncrement(), sourceId, split))) {
                    added++;
                }
            }
            // Only partitioned sources count toward the split-queue thresholds.
            if (planFragment.isPartitionedSources(sourceId)) {
                pendingSourceSplitCount += added;
                partitionedSplitCountTracker.setPartitionedSplitCount(getPartitionedSplitCount());
            }
            needsUpdate = true;
        }
        updateSplitQueueSpace();
        if (needsUpdate) {
            this.needsUpdate.set(true);
            scheduleUpdate();
        }
    }
@Override
public synchronized void noMoreSplits(PlanNodeId sourceId)
{
if (noMoreSplits.containsKey(sourceId)) {
return;
}
noMoreSplits.put(sourceId, true);
needsUpdate.set(true);
scheduleUpdate();
}
@Override
public synchronized void noMoreSplits(PlanNodeId sourceId, Lifespan lifespan)
{
if (pendingNoMoreSplitsForLifespan.put(sourceId, lifespan)) {
needsUpdate.set(true);
scheduleUpdate();
}
}
@Override
public synchronized void setOutputBuffers(OutputBuffers newOutputBuffers)
{
if (getTaskStatus().getState().isDone()) {
return;
}
if (newOutputBuffers.getVersion() > outputBuffers.get().getVersion()) {
outputBuffers.set(newOutputBuffers);
needsUpdate.set(true);
scheduleUpdate();
}
}
@Override
public int getPartitionedSplitCount()
{
TaskStatus taskStatus = getTaskStatus();
if (taskStatus.getState().isDone()) {
return 0;
}
return getPendingSourceSplitCount() + taskStatus.getQueuedPartitionedDrivers() + taskStatus.getRunningPartitionedDrivers();
}
@Override
public int getQueuedPartitionedSplitCount()
{
TaskStatus taskStatus = getTaskStatus();
if (taskStatus.getState().isDone()) {
return 0;
}
return getPendingSourceSplitCount() + taskStatus.getQueuedPartitionedDrivers();
}
@Override
public int getUnacknowledgedPartitionedSplitCount()
{
return getPendingSourceSplitCount();
}
    // Reads the count without holding the instance lock; this is safe because
    // the field is volatile and callers only need a point-in-time snapshot.
    @SuppressWarnings("FieldAccessNotGuarded")
    private int getPendingSourceSplitCount()
    {
        return pendingSourceSplitCount;
    }
    /**
     * Registers a listener invoked on every observed {@link TaskStatus}
     * change reported by the status fetcher.
     */
    @Override
    public void addStateChangeListener(StateChangeListener<TaskStatus> stateChangeListener)
    {
        // Thread name is tagged only for debuggability while registering.
        try (SetThreadName ignored = new SetThreadName("HttpRemoteTask-%s", taskId)) {
            taskStatusFetcher.addStateChangeListener(stateChangeListener);
        }
    }
@Override
public void addFinalTaskInfoListener(StateChangeListener<TaskInfo> stateChangeListener)
{
taskInfoFetcher.addFinalTaskInfoListener(stateChangeListener);
}
    /**
     * Returns a future that completes when the task's split queue has room
     * under {@code threshold}. Only one threshold value is supported per
     * task; every call must pass the same value.
     */
    @Override
    public synchronized ListenableFuture<?> whenSplitQueueHasSpace(int threshold)
    {
        if (whenSplitQueueHasSpaceThreshold.isPresent()) {
            checkArgument(threshold == whenSplitQueueHasSpaceThreshold.getAsInt(), "Multiple split queue space notification thresholds not supported");
        }
        else {
            // First caller establishes the threshold; recompute space with it.
            whenSplitQueueHasSpaceThreshold = OptionalInt.of(threshold);
            updateSplitQueueSpace();
        }
        if (splitQueueHasSpace) {
            // Already below the thresholds: complete immediately.
            return immediateFuture(null);
        }
        return whenSplitQueueHasSpace.createNewListener();
    }
    /**
     * Recomputes whether more splits may be enqueued (unacknowledged count
     * below the per-task limit and, when a threshold is set, queued count
     * below it) and wakes any registered split-queue listeners.
     */
    private synchronized void updateSplitQueueSpace()
    {
        // Must check whether the unacknowledged split count threshold is reached even without listeners registered yet
        splitQueueHasSpace = getUnacknowledgedPartitionedSplitCount() < maxUnacknowledgedSplits &&
                (whenSplitQueueHasSpaceThreshold.isEmpty() || getQueuedPartitionedSplitCount() < whenSplitQueueHasSpaceThreshold.getAsInt());
        // Only trigger notifications if a listener might be registered
        if (splitQueueHasSpace && whenSplitQueueHasSpaceThreshold.isPresent()) {
            whenSplitQueueHasSpace.complete(null, executor);
        }
    }
    /**
     * Applies the worker's acknowledgement of a task update: records the new
     * {@link TaskInfo} and drops the splits and notifications the worker has
     * now accepted from the pending collections.
     */
    private synchronized void processTaskUpdate(TaskInfo newValue, List<TaskSource> sources)
    {
        updateTaskInfo(newValue);
        // remove acknowledged splits, which frees memory
        for (TaskSource source : sources) {
            PlanNodeId planNodeId = source.getPlanNodeId();
            int removed = 0;
            for (ScheduledSplit split : source.getSplits()) {
                if (pendingSplits.remove(planNodeId, split)) {
                    removed++;
                }
            }
            if (source.isNoMoreSplits()) {
                // false marks the "no more splits" notification as delivered.
                noMoreSplits.put(planNodeId, false);
            }
            for (Lifespan lifespan : source.getNoMoreSplitsForLifespan()) {
                pendingNoMoreSplitsForLifespan.remove(planNodeId, lifespan);
            }
            if (planFragment.isPartitionedSources(planNodeId)) {
                pendingSourceSplitCount -= removed;
            }
        }
        // Update node level split tracker before split queue space to ensure it's up to date before waking up the scheduler
        partitionedSplitCountTracker.setPartitionedSplitCount(getPartitionedSplitCount());
        updateSplitQueueSpace();
    }
private void updateTaskInfo(TaskInfo taskInfo)
{
taskStatusFetcher.updateTaskStatus(taskInfo.getTaskStatus());
taskInfoFetcher.updateTaskInfo(taskInfo);
}
private void scheduleUpdate()
{
executor.execute(this::sendUpdate);
}
    /**
     * Sends pending state (the plan on the first call, new splits,
     * no-more-splits notifications, and output buffer changes) to the worker
     * as a single POST. Serialized by the instance lock; at most one update
     * request is in flight at a time, and error backoff is honored.
     */
    private synchronized void sendUpdate()
    {
        TaskStatus taskStatus = getTaskStatus();
        // don't update if the task hasn't been started yet or if it is already finished
        if (!started.get() || !needsUpdate.get() || taskStatus.getState().isDone()) {
            return;
        }
        // if there is a request already running, wait for it to complete
        if (this.currentRequest != null && !this.currentRequest.isDone()) {
            return;
        }
        // if throttled due to error, asynchronously wait for timeout and try again
        ListenableFuture<?> errorRateLimit = updateErrorTracker.acquireRequestPermit();
        if (!errorRateLimit.isDone()) {
            errorRateLimit.addListener(this::sendUpdate, executor);
            return;
        }
        List<TaskSource> sources = getSources();
        // Workers don't need the embedded JSON representation when the fragment is sent
        Optional<PlanFragment> fragment = sendPlan.get() ? Optional.of(planFragment.withoutEmbeddedJsonRepresentation()) : Optional.empty();
        TaskUpdateRequest updateRequest = new TaskUpdateRequest(
                session.toSessionRepresentation(),
                session.getIdentity().getExtraCredentials(),
                fragment,
                sources,
                outputBuffers.get(),
                totalPartitions);
        byte[] taskUpdateRequestJson = taskUpdateRequestCodec.toJsonBytes(updateRequest);
        // Only count the payload toward plan-bytes stats when the plan was included.
        if (fragment.isPresent()) {
            stats.updateWithPlanBytes(taskUpdateRequestJson.length);
        }
        HttpUriBuilder uriBuilder = getHttpUriBuilder(taskStatus);
        Request request = preparePost()
                .setUri(uriBuilder.build())
                .setHeader(HttpHeaders.CONTENT_TYPE, MediaType.JSON_UTF_8.toString())
                .setBodyGenerator(createStaticBodyGenerator(taskUpdateRequestJson))
                .build();
        updateErrorTracker.startRequest();
        ListenableFuture<JsonResponse<TaskInfo>> future = httpClient.executeAsync(request, createFullJsonResponseHandler(taskInfoCodec));
        currentRequest = future;
        currentRequestStartNanos = System.nanoTime();
        // The needsUpdate flag needs to be set to false BEFORE adding the Future callback since callback might change the flag value
        // and does so without grabbing the instance lock.
        needsUpdate.set(false);
        Futures.addCallback(future, new SimpleHttpResponseHandler<>(new UpdateResponseHandler(sources), request.getUri(), stats), executor);
    }
private synchronized List<TaskSource> getSources()
{
return Stream.concat(planFragment.getPartitionedSourceNodes().stream(), planFragment.getRemoteSourceNodes().stream())
.filter(Objects::nonNull)
.map(PlanNode::getId)
.map(this::getSource)
.filter(Objects::nonNull)
.collect(toImmutableList());
}
/**
 * Builds the {@link TaskSource} for a single plan node, or returns {@code null}
 * when there is nothing to send: no pending splits, no pending per-lifespan
 * no-more-splits markers, and no pending no-more-splits acknowledgement.
 */
private synchronized TaskSource getSource(PlanNodeId planNodeId)
{
    Set<ScheduledSplit> splits = pendingSplits.get(planNodeId);
    Set<Lifespan> noMoreSplitsForLifespan = pendingNoMoreSplitsForLifespan.get(planNodeId);
    // TRUE in the map means the no-more-splits signal has not been acknowledged yet
    boolean pendingNoMoreSplits = Boolean.TRUE.equals(this.noMoreSplits.get(planNodeId));
    if (splits.isEmpty() && noMoreSplitsForLifespan.isEmpty() && !pendingNoMoreSplits) {
        return null;
    }
    // mere presence of the key means no-more-splits was declared for this node
    return new TaskSource(planNodeId, splits, noMoreSplitsForLifespan, this.noMoreSplits.containsKey(planNodeId));
}
/**
 * Requests cancellation of the remote task. A no-op when the task has already
 * reached a terminal state. The DELETE response is ignored; delivery is retried
 * through the cleanup backoff machinery.
 */
@Override
public synchronized void cancel()
{
    try (SetThreadName ignored = new SetThreadName("HttpRemoteTask-%s", taskId)) {
        TaskStatus status = getTaskStatus();
        if (status.getState().isDone()) {
            // nothing to cancel: the task already finished
            return;
        }
        // send cancel (abort=false distinguishes cancel from abort) and ignore the response
        Request cancelRequest = prepareDelete()
                .setUri(getHttpUriBuilder(status).addParameter("abort", "false").build())
                .build();
        scheduleAsyncCleanupRequest(createCleanupBackoff(), cancelRequest, "cancel");
    }
}
/**
 * Releases coordinator-side resources for a task that has reached a terminal
 * state and sends one final DELETE to the worker to obtain the terminal TaskInfo.
 * Must only be called once the task status reports done.
 */
private synchronized void cleanUpTask()
{
    checkState(getTaskStatus().getState().isDone(), "attempt to clean up a task that is not done yet");
    // clear pending splits to free memory
    pendingSplits.clear();
    pendingSourceSplitCount = 0;
    partitionedSplitCountTracker.setPartitionedSplitCount(getPartitionedSplitCount());
    // unblock anyone waiting for split-queue space; this task will never take more splits
    splitQueueHasSpace = true;
    whenSplitQueueHasSpace.complete(null, executor);
    // cancel pending request
    if (currentRequest != null) {
        currentRequest.cancel(true);
        currentRequest = null;
        currentRequestStartNanos = 0;
    }
    // stop the background pollers; no further status/dynamic-filter updates are needed
    taskStatusFetcher.stop();
    dynamicFiltersFetcher.stop();
    // The remote task is likely to get a delete from the PageBufferClient first.
    // We send an additional delete anyway to get the final TaskInfo
    HttpUriBuilder uriBuilder = getHttpUriBuilder(getTaskStatus());
    Request request = prepareDelete()
            .setUri(uriBuilder.build())
            .build();
    scheduleAsyncCleanupRequest(createCleanupBackoff(), request, "cleanup");
}
/**
 * Aborts the remote task unless it has already reached a terminal state,
 * recording an ABORTED status with no failure causes.
 */
@Override
public synchronized void abort()
{
    TaskStatus taskStatus = getTaskStatus();
    if (!taskStatus.getState().isDone()) {
        abort(failWith(getTaskStatus(), ABORTED, ImmutableList.of()));
    }
}
/**
 * Records the given terminal {@code status} locally and sends a DELETE to the
 * worker so the remote side is torn down as well.
 *
 * @param status terminal status to record; must already be in a done state
 */
private synchronized void abort(TaskStatus status)
{
    checkState(status.getState().isDone(), "cannot abort task with an incomplete status");
    try (SetThreadName ignored = new SetThreadName("HttpRemoteTask-%s", taskId)) {
        // make the terminal status visible locally before contacting the worker
        taskStatusFetcher.updateTaskStatus(status);
        // send abort to task
        HttpUriBuilder uriBuilder = getHttpUriBuilder(getTaskStatus());
        Request request = prepareDelete()
                .setUri(uriBuilder.build())
                .build();
        scheduleAsyncCleanupRequest(createCleanupBackoff(), request, "abort");
    }
}
/**
 * Starts the asynchronous cleanup-request cycle at most once per task.
 *
 * @param cleanupBackoff retry schedule for the cleanup request
 * @param request the DELETE request to deliver to the worker
 * @param action human-readable action name used in error messages ("cancel", "abort", "cleanup")
 */
private void scheduleAsyncCleanupRequest(Backoff cleanupBackoff, Request request, String action)
{
    // Only the first caller wins the CAS. A second round of cleanup requests could
    // otherwise recurse asynchronously, e.g. when aborting a task after REMOTE_TASK_MISMATCH.
    if (aborting.compareAndSet(false, true)) {
        doScheduleAsyncCleanupRequest(cleanupBackoff, request, action);
    }
}
/**
 * Executes the cleanup DELETE asynchronously and retries it per the backoff
 * schedule. When the worker cannot be reached (client closed, or backoff
 * depleted) the task info is finalized locally instead so the coordinator can
 * make progress.
 */
private void doScheduleAsyncCleanupRequest(Backoff cleanupBackoff, Request request, String action)
{
    Futures.addCallback(httpClient.executeAsync(request, createFullJsonResponseHandler(taskInfoCodec)), new FutureCallback<>()
    {
        @Override
        public void onSuccess(JsonResponse<TaskInfo> result)
        {
            try {
                updateTaskInfo(result.getValue());
            }
            finally {
                // if the worker's response did not carry a terminal state,
                // force-finalize locally so we stop waiting on this task
                if (!getTaskInfo().getTaskStatus().getState().isDone()) {
                    cleanUpLocally();
                }
            }
        }
        @Override
        public void onFailure(Throwable t)
        {
            // the HTTP client was shut down; no point retrying
            if (t instanceof RejectedExecutionException && httpClient.isClosed()) {
                logError(t, "Unable to %s task at %s. HTTP client is closed.", action, request.getUri());
                cleanUpLocally();
                return;
            }
            // record failure; failure() returns true once the backoff budget is exhausted
            if (cleanupBackoff.failure()) {
                logError(t, "Unable to %s task at %s. Back off depleted.", action, request.getUri());
                cleanUpLocally();
                return;
            }
            // reschedule: immediately when no delay is due, otherwise via the error executor
            long delayNanos = cleanupBackoff.getBackoffDelayNanos();
            if (delayNanos == 0) {
                doScheduleAsyncCleanupRequest(cleanupBackoff, request, action);
            }
            else {
                errorScheduledExecutor.schedule(() -> doScheduleAsyncCleanupRequest(cleanupBackoff, request, action), delayNanos, NANOSECONDS);
            }
        }
        private void cleanUpLocally()
        {
            // Update the taskInfo with the new taskStatus.
            // Generally, we send a cleanup request to the worker, and update the TaskInfo on
            // the coordinator based on what we fetched from the worker. If we somehow cannot
            // get the cleanup request to the worker, the TaskInfo that we fetch for the worker
            // likely will not say the task is done however many times we try. In this case,
            // we have to set the local query info directly so that we stop trying to fetch
            // updated TaskInfo from the worker. This way, the task on the worker eventually
            // expires due to lack of activity.
            // This is required because the query state machine depends on TaskInfo (instead of task status)
            // to transition its own state.
            // TODO: Update the query state machine and stage state machine to depend on TaskStatus instead
            // Since this TaskInfo is updated in the client the "complete" flag will not be set,
            // indicating that the stats may not reflect the final stats on the worker.
            updateTaskInfo(getTaskInfo().withTaskStatus(getTaskStatus()));
        }
    }, executor);
}
/**
 * Move the task directly to the failed state if there was a failure in this task.
 *
 * @param cause the failure to attach to the terminal FAILED status
 */
private void failTask(Throwable cause)
{
    TaskStatus taskStatus = getTaskStatus();
    // only log while the task is still running; failures observed after completion are expected noise
    if (!taskStatus.getState().isDone()) {
        log.debug(cause, "Remote task %s failed with %s", taskStatus.getSelf(), cause);
    }
    // failWith produces a done status, satisfying abort(TaskStatus)'s precondition
    abort(failWith(getTaskStatus(), FAILED, ImmutableList.of(toFailure(cause))));
}
/**
 * Builds the base URI for worker requests from the task's self link, adding the
 * "summarize" query parameter when trimmed-down TaskInfo payloads are requested.
 */
private HttpUriBuilder getHttpUriBuilder(TaskStatus taskStatus)
{
    HttpUriBuilder builder = uriBuilderFrom(taskStatus.getSelf());
    if (summarizeTaskInfo) {
        // value-less flag parameter understood by the worker's task resource
        builder.addParameter("summarize");
    }
    return builder;
}
/**
 * Creates the retry schedule used for cleanup requests: delays ramp
 * 0ms -> 100ms -> 500ms -> 1s -> 10s, with the standard Backoff limits
 * (10 minimum failures, 10 minute max failure interval).
 */
private static Backoff createCleanupBackoff()
{
    return new Backoff(
            10,
            new Duration(10, TimeUnit.MINUTES),
            Ticker.systemTicker(),
            ImmutableList.of(
                    new Duration(0, MILLISECONDS),
                    new Duration(100, MILLISECONDS),
                    new Duration(500, MILLISECONDS),
                    new Duration(1, SECONDS),
                    new Duration(10, SECONDS)));
}
/**
 * Renders this task via Guava's helper as {@code HttpRemoteTask{<taskInfo>}}.
 */
@Override
public String toString()
{
    return toStringHelper(this).addValue(getTaskInfo()).toString();
}
/**
 * Handles the response of a task update request. On success the returned
 * TaskInfo is processed and the error tracker reset; on failure the task is
 * marked as needing another update and the error is recorded unless the task
 * already finished. Either way another update round is always triggered.
 */
private class UpdateResponseHandler
        implements SimpleHttpResponseCallback<TaskInfo>
{
    // the sources that were sent in the corresponding update request
    private final List<TaskSource> sources;
    private UpdateResponseHandler(List<TaskSource> sources)
    {
        this.sources = ImmutableList.copyOf(requireNonNull(sources, "sources is null"));
    }
    @Override
    public void success(TaskInfo value)
    {
        try (SetThreadName ignored = new SetThreadName("UpdateResponseHandler-%s", taskId)) {
            try {
                long currentRequestStartNanos;
                // snapshot the request bookkeeping under the task lock, then work outside it
                synchronized (HttpRemoteTask.this) {
                    currentRequest = null;
                    sendPlan.set(value.isNeedsPlan());
                    currentRequestStartNanos = HttpRemoteTask.this.currentRequestStartNanos;
                }
                updateStats(currentRequestStartNanos);
                processTaskUpdate(value, sources);
                updateErrorTracker.requestSucceeded();
            }
            finally {
                // always kick the update loop again, even if processing failed
                sendUpdate();
            }
        }
    }
    @Override
    public void failed(Throwable cause)
    {
        try (SetThreadName ignored = new SetThreadName("UpdateResponseHandler-%s", taskId)) {
            try {
                long currentRequestStartNanos;
                synchronized (HttpRemoteTask.this) {
                    currentRequest = null;
                    currentRequestStartNanos = HttpRemoteTask.this.currentRequestStartNanos;
                }
                updateStats(currentRequestStartNanos);
                // on failure assume we need to update again
                needsUpdate.set(true);
                // if task not already done, record error
                TaskStatus taskStatus = getTaskStatus();
                if (!taskStatus.getState().isDone()) {
                    updateErrorTracker.requestFailed(cause);
                }
            }
            catch (Error e) {
                // Errors are fatal: fail the task, then propagate
                failTask(e);
                throw e;
            }
            catch (RuntimeException e) {
                failTask(e);
            }
            finally {
                sendUpdate();
            }
        }
    }
    @Override
    public void fatal(Throwable cause)
    {
        try (SetThreadName ignored = new SetThreadName("UpdateResponseHandler-%s", taskId)) {
            failTask(cause);
        }
    }
    // records the round-trip latency of the just-completed update request
    private void updateStats(long currentRequestStartNanos)
    {
        Duration requestRoundTrip = Duration.nanosSince(currentRequestStartNanos);
        stats.updateRoundTripMillis(requestRoundTrip.toMillis());
    }
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.Version;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.AlreadyExpiredException;
import java.io.IOException;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeTimeValue;
/**
 * Metadata mapper for the legacy {@code _ttl} field, which stores a per-document
 * expiration timestamp (ingest timestamp + ttl). Both the type parser and the
 * constructor reject enabling it on indexes created on or after 5.0.0-alpha4.
 */
public class TTLFieldMapper extends MetadataFieldMapper {
    public static final String NAME = "_ttl";
    public static final String CONTENT_TYPE = "_ttl";
    // Default field configuration: stored, indexed (docs only), not tokenized, no doc values.
    public static class Defaults extends LegacyLongFieldMapper.Defaults {
        public static final String NAME = TTLFieldMapper.CONTENT_TYPE;
        public static final TTLFieldType TTL_FIELD_TYPE = new TTLFieldType();
        static {
            TTL_FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
            TTL_FIELD_TYPE.setStored(true);
            TTL_FIELD_TYPE.setTokenized(false);
            TTL_FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_64_BIT);
            TTL_FIELD_TYPE.setName(NAME);
            TTL_FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
            TTL_FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
            TTL_FIELD_TYPE.freeze();
        }
        public static final EnabledAttributeMapper ENABLED_STATE = EnabledAttributeMapper.UNSET_DISABLED;
        // -1 means "no default ttl configured"
        public static final long DEFAULT = -1;
    }
    // Builder configuring enabled state and the default ttl (in millis).
    public static class Builder extends MetadataFieldMapper.Builder<Builder, TTLFieldMapper> {
        private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED;
        private long defaultTTL = Defaults.DEFAULT;
        public Builder() {
            super(Defaults.NAME, Defaults.TTL_FIELD_TYPE, Defaults.FIELD_TYPE);
        }
        public Builder enabled(EnabledAttributeMapper enabled) {
            this.enabledState = enabled;
            return builder;
        }
        public Builder defaultTTL(long defaultTTL) {
            this.defaultTTL = defaultTTL;
            return builder;
        }
        @Override
        public TTLFieldMapper build(BuilderContext context) {
            setupFieldType(context);
            // _ttl never uses doc values; the value is stored and indexed only
            fieldType.setHasDocValues(false);
            return new TTLFieldMapper(fieldType, enabledState, defaultTTL, context.indexSettings());
        }
    }
    // Parses the "_ttl" mapping entry ("enabled" and "default" keys); rejects 5.0+ indexes outright.
    public static class TypeParser implements MetadataFieldMapper.TypeParser {
        @Override
        public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha4)) {
                throw new IllegalArgumentException("[_ttl] is removed in 5.0. As a replacement, you should use time based indexes or cron a delete-by-query with a range query on a timestamp field.");
            }
            Builder builder = new Builder();
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String fieldName = entry.getKey();
                Object fieldNode = entry.getValue();
                if (fieldName.equals("enabled")) {
                    EnabledAttributeMapper enabledState = lenientNodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED;
                    builder.enabled(enabledState);
                    // consume the entry so unknown-key validation elsewhere does not trip on it
                    iterator.remove();
                } else if (fieldName.equals("default")) {
                    TimeValue ttlTimeValue = nodeTimeValue(fieldNode, null);
                    if (ttlTimeValue != null) {
                        builder.defaultTTL(ttlTimeValue.millis());
                    }
                    iterator.remove();
                }
            }
            return builder;
        }
        @Override
        public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fieldType, String typeName) {
            return new TTLFieldMapper(indexSettings);
        }
    }
    public static final class TTLFieldType extends LegacyLongFieldMapper.LongFieldType {
        public TTLFieldType() {
        }
        protected TTLFieldType(TTLFieldType ref) {
            super(ref);
        }
        @Override
        public TTLFieldType clone() {
            return new TTLFieldType(this);
        }
        // Overrides valueForDisplay to show the live remaining ttl
        // (stored expiration timestamp minus current time).
        @Override
        public Object valueForDisplay(Object value) {
            final long now = System.currentTimeMillis();
            // NOTE(review): assumes super.valueForDisplay never returns null here;
            // a null value would throw NPE on unboxing — TODO confirm upstream contract
            Long val = (Long) super.valueForDisplay(value);
            return val - now;
        }
    }
    private EnabledAttributeMapper enabledState;
    private long defaultTTL;
    private TTLFieldMapper(Settings indexSettings) {
        this(Defaults.TTL_FIELD_TYPE.clone(), Defaults.ENABLED_STATE, Defaults.DEFAULT, indexSettings);
    }
    private TTLFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled, long defaultTTL,
                           Settings indexSettings) {
        super(NAME, fieldType, Defaults.TTL_FIELD_TYPE, indexSettings);
        // enabling _ttl is forbidden on 5.0+ indexes; a disabled mapper may still be
        // instantiated for backwards-compatible mapping parsing
        if (enabled.enabled && Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0_alpha4)) {
            throw new IllegalArgumentException("[_ttl] is removed in 5.0. As a replacement, you should use time based indexes or cron a delete-by-query with a range query on a timestamp field.");
        }
        this.enabledState = enabled;
        this.defaultTTL = defaultTTL;
    }
    public boolean enabled() {
        return this.enabledState.enabled;
    }
    public long defaultTTL() {
        return this.defaultTTL;
    }
    @Override
    public void preParse(ParseContext context) throws IOException {
    }
    @Override
    public void postParse(ParseContext context) throws IOException {
        // the ttl field is indexed after the document body has been parsed
        super.parse(context);
    }
    @Override
    public Mapper parse(ParseContext context) throws IOException, MapperParsingException {
        if (context.sourceToParse().ttl() < 0) { // no ttl has been provided externally
            long ttl;
            // accept either a time-value string ("5m") or a raw millisecond number
            if (context.parser().currentToken() == XContentParser.Token.VALUE_STRING) {
                ttl = TimeValue.parseTimeValue(context.parser().text(), null, "ttl").millis();
            } else {
                ttl = context.parser().longValue(true);
            }
            if (ttl <= 0) {
                throw new MapperParsingException("TTL value must be > 0. Illegal value provided [" + ttl + "]");
            }
            context.sourceToParse().ttl(ttl);
        }
        return null;
    }
    @Override
    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException, AlreadyExpiredException {
        if (enabledState.enabled) {
            long ttl = context.sourceToParse().ttl();
            if (ttl <= 0 && defaultTTL > 0) { // no ttl provided so we use the default value
                ttl = defaultTTL;
                context.sourceToParse().ttl(ttl);
            }
            if (ttl > 0) { // a ttl has been provided either externally or in the _source
                long timestamp = context.sourceToParse().timestamp();
                long expire = new Date(timestamp + ttl).getTime();
                long now = System.currentTimeMillis();
                // there is no point indexing an already-expired doc
                if (context.sourceToParse().origin() == SourceToParse.Origin.PRIMARY && now >= expire) {
                    throw new AlreadyExpiredException(context.sourceToParse().index(),
                        context.sourceToParse().type(), context.sourceToParse().id(), timestamp, ttl, now);
                }
                // the expiration timestamp (timestamp + ttl) is set as field
                fields.add(new LegacyLongFieldMapper.CustomLongNumericField(expire, fieldType()));
            }
        }
    }
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
        // if all are defaults, no sense to write it at all
        if (!includeDefaults && enabledState == Defaults.ENABLED_STATE && defaultTTL == Defaults.DEFAULT) {
            return builder;
        }
        builder.startObject(CONTENT_TYPE);
        if (includeDefaults || enabledState != Defaults.ENABLED_STATE) {
            builder.field("enabled", enabledState.enabled);
        }
        // && binds tighter than ||: write "default" when forced by includeDefaults,
        // or when a non-default ttl is configured on an enabled mapper
        if (includeDefaults || defaultTTL != Defaults.DEFAULT && enabledState.enabled) {
            builder.field("default", defaultTTL);
        }
        builder.endObject();
        return builder;
    }
    @Override
    protected String contentType() {
        return NAME;
    }
    @Override
    protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
        TTLFieldMapper ttlMergeWith = (TTLFieldMapper) mergeWith;
        if (ttlMergeWith.enabledState != Defaults.ENABLED_STATE) {//only do something if actually something was set for the document mapper that we merge with
            // _ttl can be enabled by a merge, but never disabled again afterwards
            if (this.enabledState == EnabledAttributeMapper.ENABLED && ttlMergeWith.enabledState == EnabledAttributeMapper.DISABLED) {
                throw new IllegalArgumentException("_ttl cannot be disabled once it was enabled.");
            } else {
                this.enabledState = ttlMergeWith.enabledState;
            }
        }
        if (ttlMergeWith.defaultTTL != -1) {
            // we never build the default when the field is disabled so we should also not set it
            // (it does not make a difference though as everything that is not build in toXContent will also not be set in the cluster)
            if (enabledState == EnabledAttributeMapper.ENABLED) {
                this.defaultTTL = ttlMergeWith.defaultTTL;
            }
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
/**
 * Tests for the {@code _source} metadata field mapper: content-type preservation,
 * include/exclude filtering, default-mapping interaction, and the immutability of
 * the enabled/includes/excludes settings.
 */
public class SourceFieldMapperTests extends ESSingleNodeTestCase {
    @Override
    protected Collection<Class<? extends Plugin>> getPlugins() {
        return pluginList(InternalSettingsPlugin.class);
    }
    // _source should be stored in whatever content type (JSON/SMILE) the document arrived in
    public void testNoFormat() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("_source").endObject()
            .endObject().endObject().string();
        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
        DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(mapping));
        ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder().startObject()
            .field("field", "value")
            .endObject().bytes(),
            XContentType.JSON));
        assertThat(XContentFactory.xContentType(doc.source()), equalTo(XContentType.JSON));
        documentMapper = parser.parse("type", new CompressedXContent(mapping));
        doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.smileBuilder().startObject()
            .field("field", "value")
            .endObject().bytes(),
            XContentType.SMILE));
        assertThat(XContentFactory.xContentType(doc.source()), equalTo(XContentType.SMILE));
    }
    // "includes" patterns keep only matching top-level paths in the stored _source
    public void testIncludes() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("_source").array("includes", new String[]{"path1*"}).endObject()
            .endObject().endObject().string();
        DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
        ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder().startObject()
            .startObject("path1").field("field1", "value1").endObject()
            .startObject("path2").field("field2", "value2").endObject()
            .endObject().bytes(),
            XContentType.JSON));
        IndexableField sourceField = doc.rootDoc().getField("_source");
        Map<String, Object> sourceAsMap;
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, new BytesArray(sourceField.binaryValue()))) {
            sourceAsMap = parser.map();
        }
        assertThat(sourceAsMap.containsKey("path1"), equalTo(true));
        assertThat(sourceAsMap.containsKey("path2"), equalTo(false));
    }
    // "excludes" patterns strip matching top-level paths from the stored _source
    public void testExcludes() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("_source").array("excludes", new String[]{"path1*"}).endObject()
            .endObject().endObject().string();
        DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
        ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder().startObject()
            .startObject("path1").field("field1", "value1").endObject()
            .startObject("path2").field("field2", "value2").endObject()
            .endObject().bytes(),
            XContentType.JSON));
        IndexableField sourceField = doc.rootDoc().getField("_source");
        Map<String, Object> sourceAsMap;
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, new BytesArray(sourceField.binaryValue()))) {
            sourceAsMap = parser.map();
        }
        assertThat(sourceAsMap.containsKey("path1"), equalTo(false));
        assertThat(sourceAsMap.containsKey("path2"), equalTo(true));
    }
    // the _default_ mapping settings apply when a type mapping is created from it;
    // parsing with a null type or empty content must fail
    public void testDefaultMappingAndNoMapping() throws Exception {
        String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING)
            .startObject("_source").field("enabled", false).endObject()
            .endObject().endObject().string();
        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
        DocumentMapper mapper = parser.parse("my_type", null, defaultMapping);
        assertThat(mapper.type(), equalTo("my_type"));
        assertThat(mapper.sourceMapper().enabled(), equalTo(false));
        try {
            mapper = parser.parse(null, null, defaultMapping);
            assertThat(mapper.type(), equalTo("my_type"));
            assertThat(mapper.sourceMapper().enabled(), equalTo(false));
            fail();
        } catch (MapperParsingException e) {
            // all is well
        }
        try {
            mapper = parser.parse(null, new CompressedXContent("{}"), defaultMapping);
            assertThat(mapper.type(), equalTo("my_type"));
            assertThat(mapper.sourceMapper().enabled(), equalTo(false));
            fail();
        } catch (MapperParsingException e) {
            assertThat(e.getMessage(), equalTo("malformed mapping no root object found"));
            // all is well
        }
    }
    // an explicit type mapping overrides the _default_ mapping's _source settings
    public void testDefaultMappingAndWithMappingOverride() throws Exception {
        String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING)
            .startObject("_source").field("enabled", false).endObject()
            .endObject().endObject().string();
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("my_type")
            .startObject("_source").field("enabled", true).endObject()
            .endObject().endObject().string();
        DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser()
            .parse("my_type", new CompressedXContent(mapping), defaultMapping);
        assertThat(mapper.type(), equalTo("my_type"));
        assertThat(mapper.sourceMapper().enabled(), equalTo(true));
    }
    // same as above, but going through the MapperService auto-create path (pre-6.x index)
    public void testDefaultMappingAndNoMappingWithMapperService() throws Exception {
        String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING)
            .startObject("_source").field("enabled", false).endObject()
            .endObject().endObject().string();
        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_6_0).build();
        MapperService mapperService = createIndex("test", settings).mapperService();
        mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), MapperService.MergeReason.MAPPING_UPDATE, false);
        DocumentMapper mapper = mapperService.documentMapperWithAutoCreate("my_type").getDocumentMapper();
        assertThat(mapper.type(), equalTo("my_type"));
        assertThat(mapper.sourceMapper().enabled(), equalTo(false));
    }
    // explicit mapping merged via MapperService overrides the _default_ mapping
    public void testDefaultMappingAndWithMappingOverrideWithMapperService() throws Exception {
        String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING)
            .startObject("_source").field("enabled", false).endObject()
            .endObject().endObject().string();
        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_6_0).build();
        MapperService mapperService = createIndex("test", settings).mapperService();
        mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), MapperService.MergeReason.MAPPING_UPDATE, false);
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("my_type")
            .startObject("_source").field("enabled", true).endObject()
            .endObject().endObject().string();
        mapperService.merge("my_type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false);
        DocumentMapper mapper = mapperService.documentMapper("my_type");
        assertThat(mapper.type(), equalTo("my_type"));
        assertThat(mapper.sourceMapper().enabled(), equalTo(true));
    }
    // merges mapping1 -> mapping2 and asserts either success (no conflicts given)
    // or that each expected conflict message is reported
    void assertConflicts(String mapping1, String mapping2, DocumentMapperParser parser, String... conflicts) throws IOException {
        DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping1));
        // round-trip through the serialized mapping to mimic a cluster-state reload
        docMapper = parser.parse("type", docMapper.mappingSource());
        if (conflicts.length == 0) {
            docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), false);
        } else {
            try {
                docMapper.merge(parser.parse("type", new CompressedXContent(mapping2)).mapping(), false);
                fail();
            } catch (IllegalArgumentException e) {
                for (String conflict : conflicts) {
                    assertThat(e.getMessage(), containsString(conflict));
                }
            }
        }
    }
    public void testEnabledNotUpdateable() throws Exception {
        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
        // using default of true
        String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
        String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("_source").field("enabled", false).endObject()
            .endObject().endObject().string();
        assertConflicts(mapping1, mapping2, parser, "Cannot update enabled setting for [_source]");
        // not changing is ok
        String mapping3 = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("_source").field("enabled", true).endObject()
            .endObject().endObject().string();
        assertConflicts(mapping1, mapping3, parser);
    }
    public void testIncludesNotUpdateable() throws Exception {
        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
        String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
        String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("_source").array("includes", "foo.*").endObject()
            .endObject().endObject().string();
        assertConflicts(defaultMapping, mapping1, parser, "Cannot update includes setting for [_source]");
        assertConflicts(mapping1, defaultMapping, parser, "Cannot update includes setting for [_source]");
        String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("_source").array("includes", "foo.*", "bar.*").endObject()
            .endObject().endObject().string();
        assertConflicts(mapping1, mapping2, parser, "Cannot update includes setting for [_source]");
        // not changing is ok
        assertConflicts(mapping1, mapping1, parser);
    }
    public void testExcludesNotUpdateable() throws Exception {
        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
        String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
        String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("_source").array("excludes", "foo.*").endObject()
            .endObject().endObject().string();
        assertConflicts(defaultMapping, mapping1, parser, "Cannot update excludes setting for [_source]");
        assertConflicts(mapping1, defaultMapping, parser, "Cannot update excludes setting for [_source]");
        String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("_source").array("excludes", "foo.*", "bar.*").endObject()
            .endObject().endObject().string();
        assertConflicts(mapping1, mapping2, parser, "Cannot update excludes setting for [_source]");
        // not changing is ok
        assertConflicts(mapping1, mapping1, parser);
    }
    // a _source mapper is "complete" only when enabled with no includes/excludes filtering
    public void testComplete() throws Exception {
        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
        assertTrue(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete());
        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("_source").field("enabled", false).endObject()
            .endObject().endObject().string();
        assertFalse(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete());
        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("_source").array("includes", "foo.*").endObject()
            .endObject().endObject().string();
        assertFalse(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete());
        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("_source").array("excludes", "foo.*").endObject()
            .endObject().endObject().string();
        assertFalse(parser.parse("type", new CompressedXContent(mapping)).sourceMapper().isComplete());
    }
    // malformed JSON (trailing tokens after the root object) must surface as a parse error
    public void testSourceObjectContainsExtraTokens() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
        DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
        try {
            documentMapper.parse(SourceToParse.source("test", "type", "1", new BytesArray("{}}"), XContentType.JSON)); // extra end object (invalid JSON)
            fail("Expected parse exception");
        } catch (MapperParsingException e) {
            assertNotNull(e.getRootCause());
            String message = e.getRootCause().getMessage();
            assertTrue(message, message.contains("Unexpected close marker '}'"));
        }
    }
}
| |
/*
* This file is part of the DITA Open Toolkit project.
* See the accompanying license.txt file for applicable licenses.
*/
/*
* (c) Copyright IBM Corp. 2010 All Rights Reserved.
*/
package org.dita.dost.util;
import static org.dita.dost.util.Constants.*;
import static org.apache.commons.io.FileUtils.*;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.io.Writer;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Map.Entry;
import javax.xml.parsers.DocumentBuilder;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import org.dita.dost.log.DITAOTLogger;
import org.dita.dost.log.DITAOTAntLogger;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/**
 * Ant task that converts the character set of, and escapes entities in,
 * HTML Help component files (html/hhc/hhk/hhp).
 *
 * @version 1.0 2010-09-30
 *
 * @author Zhang Di Hua
 */
public final class ConvertLang extends Task {

    /** Key into {@link #charsetMap} for the charset used by hhp files. */
    private static final String ATTRIBUTE_FORMAT_VALUE_WINDOWS = "windows";
    /** Key into {@link #charsetMap} for the charset used by html/hhc/hhk files. */
    private static final String ATTRIBUTE_FORMAT_VALUE_HTML = "html";

    // XML declarations that are stripped from the processed files; tag2 covers
    // the case where the declaration is fused with the hhp "[OPTIONS]" header.
    private static final String tag1 = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>";
    private static final String tag2 = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>[OPTIONS]";
    private static final String tag3 = "<?xml version=\"1.0\" encoding=\"utf-8\"?>";

    /** Base directory used to absolutize a relative {@link #outputdir}. */
    private String basedir;
    /** Directory whose files are converted in place (recursively). */
    private String outputdir;
    /** Message logged once at the start of {@link #execute()}. */
    private String message;
    /** Language code (e.g. "en-us") selecting charsets and the hhp Language LCID. */
    private String langcode;
    //charset map(e.g html = iso-8859-1)
    private final Map<String, String>charsetMap = new HashMap<String, String>();
    //lang map(e.g ar- = 0x0c01 Arabic (EGYPT))
    private final Map<String, String>langMap = new HashMap<String, String>();
    //entity map(e.g 38 = &)
    private final Map<String, String>entityMap = new HashMap<String, String>();
    private DITAOTLogger logger;

    /**
     * Executes the Ant task: loads the language/entity/charset tables, then
     * rewrites every html/hhc/hhk/hhp file under {@link #outputdir} in place.
     */
    @Override
    public void execute(){
        logger = new DITAOTAntLogger(getProject());
        logger.info(message);
        //ensure outdir is absolute
        if (!new File(outputdir).isAbsolute()) {
            outputdir = new File(basedir, outputdir).getAbsolutePath();
        }
        //initialize language map
        createLangMap();
        //initialize entity map
        createEntityMap();
        //initialize charset map
        createCharsetMap();
        //change charset of html files
        convertHtmlCharset();
        //update entity and lang code
        updateAllEntitiesAndLangs();
    }

    /**
     * Loads languages.properties from the classpath into {@link #langMap}
     * (language code -> HTML Help workshop LCID string).
     */
    private void createLangMap() {
        final Properties entities = new Properties();
        InputStream in = null;
        try {
            // NOTE(review): getResourceAsStream returns null if the resource is
            // missing, which would surface as an NPE from load(), not the
            // IOException handled below — confirm the resource is always bundled.
            in = getClass().getClassLoader().getResourceAsStream("org/dita/dost/util/languages.properties");
            entities.load(in);
        } catch (final IOException e) {
            throw new RuntimeException("Failed to read language property file: " + e.getMessage(), e);
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (final IOException e) {}
            }
        }
        for (final Entry<Object, Object> e: entities.entrySet()) {
            langMap.put((String) e.getKey(), (String) e.getValue());
        }
    }

    /**
     * Loads entities.properties from the classpath into {@link #entityMap}
     * (decimal character code -> entity reference string).
     */
    private void createEntityMap(){
        final Properties entities = new Properties();
        InputStream in = null;
        try {
            // NOTE(review): same null-resource caveat as createLangMap().
            in = getClass().getClassLoader().getResourceAsStream("org/dita/dost/util/entities.properties");
            entities.load(in);
        } catch (final IOException e) {
            throw new RuntimeException("Failed to read entities property file: " + e.getMessage(), e);
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (final IOException e) {}
            }
        }
        for (final Entry<Object, Object> e: entities.entrySet()) {
            entityMap.put((String) e.getKey(), (String) e.getValue());
        }
    }

    /**
     * Parses codepages.xml and fills {@link #charsetMap} with the
     * format -> charset pairs of the first element whose lang attribute
     * matches {@link #langcode}. Falls back to iso-8859-1 / windows-1252
     * (en-us defaults) when no match is found.
     */
    private void createCharsetMap() {
        InputStream in = null;
        try {
            in = getClass().getClassLoader().getResourceAsStream("org/dita/dost/util/codepages.xml");
            final DocumentBuilder builder = XMLUtils.getDocumentBuilder();
            final Document doc = builder.parse(in);
            final Element root = doc.getDocumentElement();
            final NodeList childNodes = root.getChildNodes();
            //search the node with langcode
            for(int i = 0; i< childNodes.getLength(); i++){
                final Node node = childNodes.item(i);
                //only for element node
                if(node.getNodeType() == Node.ELEMENT_NODE){
                    final Element e = (Element)node;
                    final String lang = e.getAttribute(ATTRIBUTE_NAME_LANG);
                    //node found: exact match, or the entry's lang is a prefix match
                    if(langcode.equalsIgnoreCase(lang)||
                            lang.startsWith(langcode)){
                        //store the value into a map
                        //charsetMap = new HashMap<String, String>();
                        //iterate child nodes skip the 1st one
                        final NodeList subChild = e.getChildNodes();
                        for(int j = 0; j< subChild.getLength(); j++){
                            final Node subNode = subChild.item(j);
                            if(subNode.getNodeType() == Node.ELEMENT_NODE){
                                final Element elem = (Element)subNode;
                                final String format = elem.getAttribute(ATTRIBUTE_NAME_FORMAT);
                                final String charset = elem.getAttribute(ATTRIBUTE_NAME_CHARSET);
                                //store charset into map
                                charsetMap.put(format, charset);
                            }
                        }
                        break;
                    }
                }
            }
            //no matched charset is found set default value en-us
            if(charsetMap.size() == 0){
                charsetMap.put(ATTRIBUTE_FORMAT_VALUE_HTML, "iso-8859-1");
                charsetMap.put(ATTRIBUTE_FORMAT_VALUE_WINDOWS, "windows-1252");
            }
        } catch (final Exception e) {
            throw new RuntimeException("Failed to read charset configuration file: " + e.getMessage(), e);
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (final IOException e) {}
            }
        }
    }

    /**
     * Removes the first occurrence of the given tag from the source line.
     * Callers guarantee the tag is present (they check contains() first).
     */
    private String replaceXmlTag(final String source,final String tag){
        final int startPos = source.indexOf(tag);
        final int endPos = startPos + tag.length();
        return source.substring(0, startPos) + source.substring(endPos);
    }

    /** Converts the charset of every html-family file under the output directory. */
    private void convertHtmlCharset() {
        final File outputDir = new File(outputdir);
        final File[] files = outputDir.listFiles();
        if (files != null) {
            for (final File file : files) {
                //Recursive method
                convertCharset(file);
            }
        }
    }

    /**
     * Recursively rewrites html/hhc/hhk files: replaces the meta charset
     * declaration with the target html charset and strips XML declarations.
     * The file is rewritten through a ".temp" sibling and then moved back.
     */
    //Recursive method
    private void convertCharset(final File inputFile){
        if(inputFile.isDirectory()){
            final File[] files = inputFile.listFiles();
            if (files != null) {
                for (final File file : files) {
                    convertCharset(file);
                }
            }
        }else if(FileUtils.isHTMLFile(inputFile.getName())||
                FileUtils.isHHCFile(inputFile.getName())||
                FileUtils.isHHKFile(inputFile.getName())){
            final String fileName = inputFile.getAbsolutePath();
            final File outputFile = new File(fileName + FILE_EXTENSION_TEMP);
            log("Processing " + fileName, Project.MSG_INFO);
            BufferedReader reader = null;
            Writer writer = null;
            try {
                //prepare for the input and output
                final FileInputStream inputStream = new FileInputStream(inputFile);
                final InputStreamReader streamReader = new InputStreamReader(inputStream, UTF8);
                reader = new BufferedReader(streamReader);
                final FileOutputStream outputStream = new FileOutputStream(outputFile);
                final OutputStreamWriter streamWriter = new OutputStreamWriter(outputStream, UTF8);
                writer = new BufferedWriter(streamWriter);
                String value = reader.readLine();
                while(value != null){
                    //meta tag contains charset found
                    if(value.contains("<meta http-equiv") && value.contains("charset")){
                        final int insertPoint = value.indexOf("charset=") + "charset=".length();
                        final String subString = value.substring(0, insertPoint);
                        // NOTE(review): assumes the existing declaration names UTF-8 on
                        // this line; if UTF8 is absent, indexOf returns -1 and the
                        // remainder is computed from a wrong position — confirm inputs.
                        final int remainIndex = value.indexOf(UTF8) + UTF8.length();
                        final String remainString = value.substring(remainIndex);
                        //change the charset
                        final String newValue = subString + charsetMap.get(ATTRIBUTE_FORMAT_VALUE_HTML) + remainString;
                        //write into the output file
                        writer.write(newValue);
                        //add line break
                        writer.write(LINE_SEPARATOR);
                    }else{
                        if(value.contains(tag1)){
                            value = replaceXmlTag(value,tag1);
                        }else if(value.contains(tag2)){
                            value = replaceXmlTag(value,tag2);
                        }else if(value.contains(tag3)){
                            value = replaceXmlTag(value,tag3);
                        }
                        //other values
                        writer.write(value);
                        writer.write(LINE_SEPARATOR);
                    }
                    value = reader.readLine();
                }
            } catch (final FileNotFoundException e) {
                logger.error(e.getMessage(), e) ;
            } catch (final UnsupportedEncodingException e) {
                throw new RuntimeException(e);
            } catch (final IOException e) {
                logger.error(e.getMessage(), e) ;
            } finally {
                if (reader != null) {
                    try {
                        reader.close();
                    } catch (final IOException e) {
                        logger.error("Failed to close input stream: " + e.getMessage());
                    }
                }
                if (writer != null) {
                    try {
                        writer.close();
                    } catch (final IOException e) {
                        logger.error("Failed to close output stream: " + e.getMessage());
                    }
                }
            }
            try {
                // replace the original with the converted temp file
                deleteQuietly(inputFile);
                moveFile(outputFile, inputFile);
            } catch (final Exception e) {
                logger.error("Failed to replace " + inputFile + ": " + e.getMessage());
            }
        }
    }

    /** Applies entity escaping and language updates to every file under the output directory. */
    private void updateAllEntitiesAndLangs() {
        final File outputDir = new File(outputdir);
        final File[] files = outputDir.listFiles();
        if (files != null) {
            for (final File file : files) {
                //Recursive method
                updateEntityAndLang(file);
            }
        }
    }

    /**
     * Recursively converts entities/charset per file type; for hhp files it
     * additionally rewrites the "Language=" line to the LCID for {@link #langcode}.
     */
    //Recursive method
    private void updateEntityAndLang(final File inputFile) {
        //directory case
        if(inputFile.isDirectory()){
            final File[] files = inputFile.listFiles();
            if (files != null) {
                for (final File file : files) {
                    updateEntityAndLang(file);
                }
            }
        }
        //html/hhc/hhk file case
        else if(FileUtils.isHTMLFile(inputFile.getName())||
                FileUtils.isHHCFile(inputFile.getName())||
                FileUtils.isHHKFile(inputFile.getName())){
            //do converting work
            convertEntityAndCharset(inputFile, ATTRIBUTE_FORMAT_VALUE_HTML);
        }
        //hhp file case
        else if(FileUtils.isHHPFile(inputFile.getName())){
            //do converting work
            convertEntityAndCharset(inputFile, ATTRIBUTE_FORMAT_VALUE_WINDOWS);
            //update language setting of hhp file
            final String fileName = inputFile.getAbsolutePath();
            final File outputFile = new File(fileName + FILE_EXTENSION_TEMP);
            //get new charset
            final String charset = charsetMap.get(ATTRIBUTE_FORMAT_VALUE_WINDOWS);
            BufferedReader reader = null;
            BufferedWriter writer = null;
            try {
                //prepare for the input and output; the hhp was already converted
                //to the windows charset by convertEntityAndCharset above
                final FileInputStream inputStream = new FileInputStream(inputFile);
                final InputStreamReader streamReader = new InputStreamReader(inputStream, charset);
                //wrapped into reader
                reader = new BufferedReader(streamReader);
                final FileOutputStream outputStream = new FileOutputStream(outputFile);
                //convert charset
                final OutputStreamWriter streamWriter = new OutputStreamWriter(outputStream, charset);
                //wrapped into writer
                writer = new BufferedWriter(streamWriter);
                String value = reader.readLine();
                while(value != null){
                    if(value.contains(tag1)){
                        value = replaceXmlTag(value,tag1);
                    }else if(value.contains(tag2)){
                        value = replaceXmlTag(value,tag2);
                    }else if(value.contains(tag3)){
                        value = replaceXmlTag(value,tag3);
                    }
                    //meta tag contains charset found
                    if(value.contains("Language=")){
                        // exact langcode first, then the bare primary tag (e.g. "en" for "en-us")
                        String newValue = langMap.get(langcode);
                        if (newValue == null) {
                            newValue = langMap.get(langcode.split("-")[0]);
                        }
                        if (newValue != null) {
                            writer.write("Language=" + newValue);
                            writer.write(LINE_SEPARATOR);
                        } else {
                            throw new IllegalArgumentException("Unsupported language code '" + langcode + "', unable to map to a Locale ID.");
                        }
                    }else{
                        //other values
                        writer.write(value);
                        writer.write(LINE_SEPARATOR);
                    }
                    value = reader.readLine();
                }
            } catch (final FileNotFoundException e) {
                logger.error(e.getMessage(), e) ;
            } catch (final UnsupportedEncodingException e) {
                throw new RuntimeException(e);
            } catch (final IOException e) {
                logger.error(e.getMessage(), e) ;
            } finally {
                if (reader != null) {
                    try {
                        reader.close();
                    } catch (final IOException e) {
                        logger.error("Failed to close input stream: " + e.getMessage());
                    }
                }
                if (writer != null) {
                    try {
                        writer.close();
                    } catch (final IOException e) {
                        logger.error("Failed to close output stream: " + e.getMessage());
                    }
                }
            }
            try {
                // replace the original with the converted temp file
                deleteQuietly(inputFile);
                moveFile(outputFile, inputFile);
            } catch (final Exception e) {
                logger.error("Failed to replace " + inputFile + ": " + e.getMessage());
            }
        }
    }

    /**
     * Re-encodes a UTF-8 file into the charset registered for the given
     * format, replacing every character that has an entry in
     * {@link #entityMap} with its entity reference. Works through a ".temp"
     * sibling that then replaces the original file.
     */
    private void convertEntityAndCharset(final File inputFile, final String format) {
        final String fileName = inputFile.getAbsolutePath();
        final File outputFile = new File(fileName + FILE_EXTENSION_TEMP);
        BufferedReader reader = null;
        BufferedWriter writer = null;
        try {
            //prepare for the input and output
            final FileInputStream inputStream = new FileInputStream(inputFile);
            final InputStreamReader streamReader = new InputStreamReader(inputStream, UTF8);
            //wrapped into reader
            reader = new BufferedReader(streamReader);
            final FileOutputStream outputStream = new FileOutputStream(outputFile);
            //get new charset
            final String charset = charsetMap.get(format);
            //convert charset
            final OutputStreamWriter streamWriter = new OutputStreamWriter(outputStream, charset);
            //wrapped into writer
            writer = new BufferedWriter(streamWriter);
            //read a character
            int charCode = reader.read();
            while(charCode != -1){
                final String key = String.valueOf(charCode);
                //Is an entity char
                if(entityMap.containsKey(key)){
                    //get related entity
                    final String value = entityMap.get(key);
                    //write entity into output file
                    writer.write(value);
                }else{
                    //normal process
                    writer.write(charCode);
                }
                charCode = reader.read();
            }
        } catch (final FileNotFoundException e) {
            logger.error(e.getMessage(), e) ;
        } catch (final UnsupportedEncodingException e) {
            logger.error(e.getMessage(), e) ;
        } catch (final IOException e) {
            logger.error(e.getMessage(), e) ;
        } finally {
            if (reader != null) {
                try {
                    reader.close();
                } catch (final IOException e) {
                    logger.error("Failed to close input stream: " + e.getMessage());
                }
            }
            if (writer != null) {
                try {
                    writer.close();
                } catch (final IOException e) {
                    logger.error("Failed to close output stream: " + e.getMessage());
                }
            }
        }
        try {
            // replace the original with the converted temp file
            deleteQuietly(inputFile);
            moveFile(outputFile, inputFile);
        } catch (final Exception e) {
            logger.error("Failed to replace " + inputFile + ": " + e.getMessage());
        }
    }

    /** Ant setter: base directory used to resolve a relative output directory. */
    public void setBasedir(final String basedir) {
        this.basedir = basedir;
    }

    /** Ant setter: language code, e.g. "en-us". */
    public void setLangcode(final String langcode) {
        this.langcode = langcode;
    }

    /** Ant setter: message logged when the task starts. */
    public void setMessage(final String message) {
        this.message = message;
    }

    /** Ant setter: directory whose files will be converted in place. */
    public void setOutputdir(final String outputdir) {
        this.outputdir = outputdir;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.server.core.authz.support;
import java.util.Collection;
import java.util.Iterator;
import org.apache.directory.api.ldap.aci.ACITuple;
import org.apache.directory.api.ldap.aci.ProtectedItem;
import org.apache.directory.api.ldap.aci.protectedItem.AllAttributeValuesItem;
import org.apache.directory.api.ldap.aci.protectedItem.AttributeTypeItem;
import org.apache.directory.api.ldap.aci.protectedItem.AttributeValueItem;
import org.apache.directory.api.ldap.aci.protectedItem.ClassesItem;
import org.apache.directory.api.ldap.aci.protectedItem.MaxImmSubItem;
import org.apache.directory.api.ldap.aci.protectedItem.MaxValueCountElem;
import org.apache.directory.api.ldap.aci.protectedItem.MaxValueCountItem;
import org.apache.directory.api.ldap.aci.protectedItem.RangeOfValuesItem;
import org.apache.directory.api.ldap.aci.protectedItem.RestrictedByElem;
import org.apache.directory.api.ldap.aci.protectedItem.RestrictedByItem;
import org.apache.directory.api.ldap.aci.protectedItem.SelfValueItem;
import org.apache.directory.api.ldap.model.constants.SchemaConstants;
import org.apache.directory.api.ldap.model.entry.Attribute;
import org.apache.directory.api.ldap.model.entry.Entry;
import org.apache.directory.api.ldap.model.entry.Value;
import org.apache.directory.api.ldap.model.exception.LdapException;
import org.apache.directory.api.ldap.model.name.Dn;
import org.apache.directory.api.ldap.model.schema.AttributeType;
import org.apache.directory.api.ldap.model.schema.SchemaManager;
import org.apache.directory.server.core.api.event.Evaluator;
import org.apache.directory.server.core.api.subtree.RefinementEvaluator;
import org.apache.directory.server.i18n.I18n;
/**
* An {@link ACITupleFilter} that discards all tuples whose {@link ProtectedItem}s
* are not related with the operation. (18.8.3.2, X.501)
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
public class RelatedProtectedItemFilter implements ACITupleFilter
{
    /** Evaluates refinements (e.g. "classes" protected items) against objectClass values. */
    private final RefinementEvaluator refinementEvaluator;

    /** Evaluates "rangeOfValues" filters against the target entry. */
    private final Evaluator entryEvaluator;

    /** Used to resolve attribute types when an entry attribute has none attached. */
    private final SchemaManager schemaManager;

    public RelatedProtectedItemFilter( RefinementEvaluator refinementEvaluator, Evaluator entryEvaluator,
        SchemaManager schemaManager )
    {
        this.refinementEvaluator = refinementEvaluator;
        this.entryEvaluator = entryEvaluator;
        this.schemaManager = schemaManager;
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public Collection<ACITuple> filter( AciContext aciContext, OperationScope scope, Entry userEntry )
        throws LdapException
    {
        if ( aciContext.getAciTuples().isEmpty() )
        {
            return aciContext.getAciTuples();
        }

        // Remove, in place, every tuple whose protected items do not apply to
        // the current operation; the (possibly shrunk) collection is returned.
        for ( Iterator<ACITuple> i = aciContext.getAciTuples().iterator(); i.hasNext(); )
        {
            ACITuple tuple = i.next();

            if ( !isRelated( tuple, scope, aciContext.getUserDn(), aciContext.getEntryDn(),
                aciContext.getAttributeType(), aciContext.getAttrValue(), aciContext.getEntry() ) )
            {
                i.remove();
            }
        }

        return aciContext.getAciTuples();
    }


    /**
     * Tells whether at least one of the tuple's protected items is related to
     * the operation described by the scope / attribute / value arguments
     * (18.8.3.2, X.501).
     *
     * NOTE(review): oid stays null when attributeType is null; several
     * branches below call oid.equals(...) without a null guard, so they
     * appear to assume an attribute-level scope always carries an attribute
     * type — confirm with callers.
     *
     * @throws InternalError if the tuple contains an unknown protected item type
     */
    private boolean isRelated( ACITuple tuple, OperationScope scope, Dn userName, Dn entryName,
        AttributeType attributeType, Value attrValue, Entry entry ) throws LdapException, InternalError
    {
        String oid = null;

        if ( attributeType != null )
        {
            oid = attributeType.getOid();
        }

        for ( ProtectedItem item : tuple.getProtectedItems() )
        {
            if ( item == ProtectedItem.ENTRY )
            {
                // "entry" item: only relates to entry-level operations
                if ( scope != OperationScope.ENTRY )
                {
                    continue;
                }

                return true;
            }
            else if ( ( item == ProtectedItem.ALL_USER_ATTRIBUTE_TYPES )
                || ( item == ProtectedItem.ALL_USER_ATTRIBUTE_TYPES_AND_VALUES ) )
            {
                // relates to any attribute-level operation
                if ( scope != OperationScope.ATTRIBUTE_TYPE && scope != OperationScope.ATTRIBUTE_TYPE_AND_VALUE )
                {
                    continue;
                }

                return true;
            }
            else if ( item instanceof AllAttributeValuesItem )
            {
                // "allAttributeValues": the operation's attribute type must be listed
                if ( scope != OperationScope.ATTRIBUTE_TYPE_AND_VALUE )
                {
                    continue;
                }

                AllAttributeValuesItem aav = ( AllAttributeValuesItem ) item;

                for ( Iterator<AttributeType> iterator = aav.iterator(); iterator.hasNext(); )
                {
                    AttributeType attr = iterator.next();

                    if ( oid.equals( attr.getOid() ) )
                    {
                        return true;
                    }
                }
            }
            else if ( item instanceof AttributeTypeItem )
            {
                // "attributeType": type-level operation on one of the listed types
                if ( scope != OperationScope.ATTRIBUTE_TYPE )
                {
                    continue;
                }

                AttributeTypeItem at = ( AttributeTypeItem ) item;

                for ( Iterator<AttributeType> iterator = at.iterator(); iterator.hasNext(); )
                {
                    AttributeType attr = iterator.next();

                    if ( oid.equals( attr.getOid() ) )
                    {
                        return true;
                    }
                }
            }
            else if ( item instanceof AttributeValueItem )
            {
                // "attributeValue": both the type and the specific value must match
                if ( scope != OperationScope.ATTRIBUTE_TYPE_AND_VALUE )
                {
                    continue;
                }

                AttributeValueItem av = ( AttributeValueItem ) item;

                for ( Iterator<Attribute> j = av.iterator(); j.hasNext(); )
                {
                    Attribute entryAttribute = j.next();
                    AttributeType attr = entryAttribute.getAttributeType();
                    String attrOid;

                    if ( attr != null )
                    {
                        attrOid = entryAttribute.getAttributeType().getOid();
                    }
                    else
                    {
                        // attribute carries no schema info: resolve it by id and attach it
                        attr = schemaManager.lookupAttributeTypeRegistry( entryAttribute.getId() );
                        attrOid = attr.getOid();
                        entryAttribute.apply( attr );
                    }

                    if ( oid.equals( attrOid ) && entryAttribute.contains( attrValue ) )
                    {
                        return true;
                    }
                }
            }
            else if ( item instanceof ClassesItem )
            {
                // "classes": refinement evaluated against the entry's objectClass values
                ClassesItem refinement = ( ClassesItem ) item;

                if ( refinementEvaluator
                    .evaluate( refinement.getClasses(), entry.get( SchemaConstants.OBJECT_CLASS_AT ) ) )
                {
                    return true;
                }
            }
            else if ( item instanceof MaxImmSubItem )
            {
                // "maxImmSub" always relates; its limit is enforced by another filter
                return true;
            }
            else if ( item instanceof MaxValueCountItem )
            {
                if ( scope != OperationScope.ATTRIBUTE_TYPE_AND_VALUE )
                {
                    continue;
                }

                MaxValueCountItem mvc = ( MaxValueCountItem ) item;

                for ( Iterator<MaxValueCountElem> j = mvc.iterator(); j.hasNext(); )
                {
                    MaxValueCountElem mvcItem = j.next();

                    if ( oid.equals( mvcItem.getAttributeType().getOid() ) )
                    {
                        return true;
                    }
                }
            }
            else if ( item instanceof RangeOfValuesItem )
            {
                // "rangeOfValues": the entry itself must satisfy the stored filter
                RangeOfValuesItem rov = ( RangeOfValuesItem ) item;

                if ( entryEvaluator.evaluate( rov.getRefinement(), entryName, entry ) )
                {
                    return true;
                }
            }
            else if ( item instanceof RestrictedByItem )
            {
                if ( scope != OperationScope.ATTRIBUTE_TYPE_AND_VALUE )
                {
                    continue;
                }

                RestrictedByItem rb = ( RestrictedByItem ) item;

                for ( Iterator<RestrictedByElem> j = rb.iterator(); j.hasNext(); )
                {
                    RestrictedByElem rbItem = j.next();

                    if ( oid.equals( rbItem.getAttributeType().getOid() ) )
                    {
                        return true;
                    }
                }
            }
            else if ( item instanceof SelfValueItem )
            {
                // "selfValue": the user's own DN must appear as a value of the attribute
                if ( scope != OperationScope.ATTRIBUTE_TYPE_AND_VALUE && scope != OperationScope.ATTRIBUTE_TYPE )
                {
                    continue;
                }

                SelfValueItem sv = ( SelfValueItem ) item;

                for ( Iterator<AttributeType> iterator = sv.iterator(); iterator.hasNext(); )
                {
                    AttributeType attr = iterator.next();

                    if ( oid.equals( attr.getOid() ) )
                    {
                        Attribute entryAttribute = entry.get( oid );

                        if ( ( entryAttribute != null ) && entryAttribute.contains( userName.getNormName() ) )
                        {
                            return true;
                        }
                    }
                }
            }
            else
            {
                throw new InternalError( I18n.err( I18n.ERR_232, item.getClass().getName() ) );
            }
        }

        return false;
    }
}
| |
/*
Copyright (C) 2001, 2008 United States Government as represented by
the Administrator of the National Aeronautics and Space Administration.
All Rights Reserved.
*/
package gov.nasa.worldwind.data;
import gov.nasa.worldwind.avlist.*;
import gov.nasa.worldwind.geom.Sector;
import gov.nasa.worldwind.util.*;
/**
* @author dcollins
* @version $Id: AbstractDataRasterReader.java 8321 2009-01-05 17:06:14Z dcollins $
*/
/**
 * Skeletal {@link DataRasterReader} implementation: stores the reader's
 * description, MIME types and file-name suffixes, performs the common
 * can-read / metadata-validation plumbing, and delegates format-specific
 * work to the {@code doCanRead}/{@code doRead}/{@code doReadMetadata} hooks.
 */
public abstract class AbstractDataRasterReader implements DataRasterReader
{
    /** Human-readable description, e.g. "*.jpg, *.jpeg". */
    private final String description;
    /** Lower-cased MIME types this reader claims to support. */
    private final String[] mimeTypes;
    /** Lower-cased file-name suffixes this reader claims to support. */
    private final String[] suffixes;

    public AbstractDataRasterReader(String description, String[] mimeTypes, String[] suffixes)
    {
        this.description = description;
        this.mimeTypes = copyOf(mimeTypes);
        this.suffixes = copyOf(suffixes);
    }

    /** Builds the description from the suffixes, e.g. "*.jpg, *.png". */
    public AbstractDataRasterReader(String[] mimeTypes, String[] suffixes)
    {
        this.description = descriptionFromSuffixes(suffixes);
        this.mimeTypes = copyOf(mimeTypes);
        this.suffixes = copyOf(suffixes);
    }

    /** For readers that match by content only: no MIME types, no suffixes. */
    protected AbstractDataRasterReader(String description)
    {
        this.description = description;
        this.mimeTypes = new String[0];
        this.suffixes = new String[0];
    }

    public String getDescription()
    {
        return this.description;
    }

    /** @return a defensive copy; callers cannot mutate the reader's state. */
    public String[] getMimeTypes()
    {
        String[] copy = new String[mimeTypes.length];
        System.arraycopy(mimeTypes, 0, copy, 0, mimeTypes.length);
        return copy;
    }

    /** @return a defensive copy; callers cannot mutate the reader's state. */
    public String[] getSuffixes()
    {
        String[] copy = new String[suffixes.length];
        System.arraycopy(suffixes, 0, copy, 0, suffixes.length);
        return copy;
    }

    /**
     * Returns true when the source's suffix matches (or it has no path) AND
     * the subclass's content check {@link #doCanRead} accepts it.
     */
    public boolean canRead(DataSource source)
    {
        if (source == null || source.getSource() == null)
            return false;

        //noinspection SimplifiableIfStatement
        if (!this.canReadSuffix(source))
            return false;

        return this.doCanRead(source);
    }

    /**
     * Suffix pre-check: passes sources without a path, otherwise requires a
     * case-insensitive match against one of this reader's suffixes.
     */
    protected boolean canReadSuffix(DataSource source)
    {
        // If the source has no path, we cannot return failure, so return that the test passed.
        String path = pathFor(source);
        if (path == null)
            return true;

        // If the source has a suffix, then we return success if this reader supports the suffix.
        String pathSuffix = WWIO.getSuffix(path);
        boolean matchesAny = false;
        for (String suffix : suffixes)
        {
            if (suffix.equalsIgnoreCase(pathSuffix))
            {
                matchesAny = true;
                break;
            }
        }

        return matchesAny;
    }

    /**
     * Reads the rasters from the source.
     *
     * @throws java.io.IOException if this reader cannot read the source, or reading fails
     */
    public DataRaster[] read(DataSource source) throws java.io.IOException
    {
        if (!this.canRead(source))
        {
            String message = Logging.getMessage("DataRaster.CannotRead", source);
            Logging.logger().severe(message);
            throw new java.io.IOException(message);
        }

        return this.doRead(source);
    }

    /**
     * Reads metadata into {@code values} and validates that the required keys
     * (width, height, sector) were populated.
     *
     * @throws IllegalArgumentException if {@code values} is null
     * @throws java.io.IOException      if the source cannot be read or metadata is incomplete
     */
    public void readMetadata(DataSource source, AVList values) throws java.io.IOException
    {
        if (!this.canRead(source))
        {
            String message = Logging.getMessage("DataRaster.CannotRead", source);
            Logging.logger().severe(message);
            throw new java.io.IOException(message);
        }

        if (values == null)
        {
            String message = Logging.getMessage("nullValue.AVListIsNull");
            Logging.logger().severe(message);
            throw new IllegalArgumentException(message);
        }

        this.doReadMetadata(source, values);

        String message = this.validateMetadata(source, values);
        if (message != null)
        {
            Logging.logger().severe(message);
            throw new java.io.IOException(message);
        }
    }

    /**
     * Checks the metadata for the required WIDTH/HEIGHT/SECTOR entries.
     *
     * @return a comma-separated description of every missing entry, or null when valid
     */
    protected String validateMetadata(DataSource source, AVList values)
    {
        StringBuilder sb = new StringBuilder();

        // 'instanceof' is false for null, so no separate null check is needed.
        Object o = values.getValue(AVKey.WIDTH);
        if (!(o instanceof Integer))
            sb.append(sb.length() > 0 ? ", " : "").append(Logging.getMessage("WorldFile.NoSizeSpecified", source));

        o = values.getValue(AVKey.HEIGHT);
        if (!(o instanceof Integer))
            sb.append(sb.length() > 0 ? ", " : "").append(Logging.getMessage("WorldFile.NoSizeSpecified", source));

        o = values.getValue(AVKey.SECTOR);
        if (!(o instanceof Sector))
            sb.append(sb.length() > 0 ? ", " : "").append(Logging.getMessage("WorldFile.NoSectorSpecified", source));

        if (sb.length() == 0)
            return null;

        return sb.toString();
    }

    /** Format-specific content check; called only after the suffix pre-check passes. */
    protected abstract boolean doCanRead(DataSource source);

    /** Format-specific raster read; called only when {@link #canRead} returned true. */
    protected abstract DataRaster[] doRead(DataSource source) throws java.io.IOException;

    /** Format-specific metadata read; results are validated by {@link #validateMetadata}. */
    protected abstract void doReadMetadata(DataSource source, AVList values) throws java.io.IOException;

    //**************************************************************//
    //********************  Utilities  *****************************//
    //**************************************************************//

    /** Extracts a path string from String/File/URI/URL sources; null otherwise. */
    protected static String pathFor(DataSource source)
    {
        if (source.getSource() instanceof String)
        {
            return (String) source.getSource();
        }
        else if (source.getSource() instanceof java.io.File)
        {
            return ((java.io.File) source.getSource()).getPath();
        }
        else if (source.getSource() instanceof java.net.URI)
        {
            return source.getSource().toString();
        }
        else if (source.getSource() instanceof java.net.URL)
        {
            return source.getSource().toString();
        }

        return null;
    }

    /** Returns a lower-cased copy of the array. */
    private static String[] copyOf(String[] array)
    {
        String[] copy = new String[array.length];
        for (int i = 0; i < array.length; i++)
            copy[i] = array[i].toLowerCase();
        return copy;
    }

    /** Joins suffixes into a description of the form "*.jpg, *.png". */
    private static String descriptionFromSuffixes(String[] suffixes)
    {
        StringBuilder sb = new StringBuilder();
        for (String suffix : suffixes)
        {
            if (sb.length() > 0)
                sb.append(", ");
            sb.append("*.").append(suffix.toLowerCase());
        }
        return sb.toString();
    }
}
| |
package com.rs.io;
import com.rs.game.player.Player;
import com.rs.utils.Utils;
public final class OutputStream extends Stream {
private static final int[] BIT_MASK = new int[32];
private int opcodeStart = 0;
static {
for (int i = 0; i < 32; i++)
BIT_MASK[i] = (1 << i) - 1;
}
public OutputStream(int capacity) {
setBuffer(new byte[capacity]);
}
public OutputStream() {
setBuffer(new byte[16]);
}
public OutputStream(byte[] buffer) {
this.setBuffer(buffer);
this.offset = buffer.length;
length = buffer.length;
}
public OutputStream(int[] buffer) {
setBuffer(new byte[buffer.length]);
for (int value : buffer)
writeByte(value);
}
public void checkCapacityPosition(int position) {
if (position >= getBuffer().length) {
byte[] newBuffer = new byte[position + 16];
System.arraycopy(getBuffer(), 0, newBuffer, 0, getBuffer().length);
setBuffer(newBuffer);
}
}
public void skip(int length) {
setOffset(getOffset() + length);
}
public void setOffset(int offset) {
this.offset = offset;
}
public void writeBytes(byte[] b, int offset, int length) {
checkCapacityPosition(this.getOffset() + length - offset);
System.arraycopy(b, offset, getBuffer(), this.getOffset(), length);
this.setOffset(this.getOffset() + (length - offset));
}
public void writeBytes(byte[] b) {
int offset = 0;
int length = b.length;
checkCapacityPosition(this.getOffset() + length - offset);
System.arraycopy(b, offset, getBuffer(), this.getOffset(), length);
this.setOffset(this.getOffset() + (length - offset));
}
public void addBytes128(byte[] data, int offset, int len) {
for (int k = offset; k < len; k++)
writeByte((byte) (data[k] + 128));
}
public void addBytesS(byte[] data, int offset, int len) {
for (int k = offset; k < len; k++)
writeByte((byte) (-128 + data[k]));
}
public void addBytes_Reverse(byte[] data, int offset, int len) {
for (int i = len - 1; i >= 0; i--) {
writeByte((data[i]));
}
}
public void addBytes_Reverse128(byte[] data, int offset, int len) {
for (int i = len - 1; i >= 0; i--) {
writeByte((byte) (data[i] + 128));
}
}
public void writeByte(int i) {
writeByte(i, offset++);
}
public void writeNegativeByte(int i) {
writeByte(-i, offset++);
}
public void writeByte(int i, int position) {
checkCapacityPosition(position);
getBuffer()[position] = (byte) i;
}
public void writeByte128(int i) {
writeByte(i + 128);
}
public void writeByteC(int i) {
writeByte(-i);
}
public void write128Byte(int i) {
writeByte(128 - i);
}
public void writeShortLE128(int i) {
writeByte(i + 128);
writeByte(i >> 8);
}
public void writeShort128(int i) {
writeByte(i >> 8);
writeByte(i + 128);
}
public void writeSmart(int i) {
if (i >= 128) {
writeShort(i + 32768);
} else {
writeByte(i);
}
}
public void writeBigSmart(int i) {
if (i >= Short.MAX_VALUE)
writeInt(i - Integer.MAX_VALUE - 1);
else {
writeShort(i >= 0 ? i : 32767);
}
}
public void writeShort(int i) {
writeByte(i >> 8);
writeByte(i);
}
public void writeShortLE(int i) {
writeByte(i);
writeByte(i >> 8);
}
public void write24BitInteger(int i) {
writeByte(i >> 16);
writeByte(i >> 8);
writeByte(i);
}
public void write24BitIntegerV2(int i) {
writeByte(i >> 16);
writeByte(i);
writeByte(i >> 8);
}
public void writeInt(int i) {
writeByte(i >> 24);
writeByte(i >> 16);
writeByte(i >> 8);
writeByte(i);
}
public void writeIntV1(int i) {
writeByte(i >> 8);
writeByte(i);
writeByte(i >> 24);
writeByte(i >> 16);
}
public void writeIntV2(int i) {
writeByte(i >> 16);
writeByte(i >> 24);
writeByte(i);
writeByte(i >> 8);
}
public void writeIntLE(int i) {
writeByte(i);
writeByte(i >> 8);
writeByte(i >> 16);
writeByte(i >> 24);
}
public void writeLong(long l) {
writeByte((int) (l >> 56));
writeByte((int) (l >> 48));
writeByte((int) (l >> 40));
writeByte((int) (l >> 32));
writeByte((int) (l >> 24));
writeByte((int) (l >> 16));
writeByte((int) (l >> 8));
writeByte((int) l);
}
public void writePSmarts(int i) {
if (i < 128) {
writeByte(i);
return;
}
if (i < 32768) {
writeShort(32768 + i);
return;
} else {
System.out.println("Error psmarts out of range:");
return;
}
}
public void writeString(String s) {
checkCapacityPosition(getOffset() + s.length() + 1);
System.arraycopy(s.getBytes(), 0, getBuffer(), getOffset(), s.length());
setOffset(getOffset() + s.length());
writeByte(0);
}
public void writeGJString2(String string) {
byte[] packed = new byte[256];
int length = Utils.packGJString2(0, packed, string);
writeByte(0);
writeBytes(packed, 0, length);
writeByte(0);
}
public void writeGJString(String s) {
writeByte(0);
writeString(s);
}
public void putGJString3(String s) {
writeByte(0);
writeString(s);
writeByte(0);
}
public void writePacket(Player player, int id) {
if(player == null)
writeSmart(id);
else if(id >= 128) {
writeByte((id >> 8) + 128 + player.getIsaacKeyPair().outKey().getNextValue());
writeByte(id + player.getIsaacKeyPair().outKey().getNextValue());
}else
writeByte(id + player.getIsaacKeyPair().outKey().getNextValue());
}
public void writePacketVarByte(Player player, int id) {
writePacket(player, id);
writeByte(0);
opcodeStart = getOffset() - 1;
}
public void writePacketVarShort(Player player, int id) {
writePacket(player, id);
writeShort(0);
opcodeStart = getOffset() - 2;
}
public void endPacketVarByte() {
writeByte(getOffset() - (opcodeStart + 2) + 1, opcodeStart);
}
public void endPacketVarShort() {
int size = getOffset() - (opcodeStart + 2);
writeByte(size >> 8, opcodeStart++);
writeByte(size, opcodeStart);
}
public void initBitAccess() {
bitPosition = getOffset() * 8;
}
public void finishBitAccess() {
setOffset((bitPosition + 7) / 8);
}
public int getBitPos(int i) {
return 8 * i - bitPosition;
}
public void writeBits(int numBits, int value) {
int bytePos = bitPosition >> 3;
int bitOffset = 8 - (bitPosition & 7);
bitPosition += numBits;
for (; numBits > bitOffset; bitOffset = 8) {
checkCapacityPosition(bytePos);
getBuffer()[bytePos] &= ~BIT_MASK[bitOffset];
getBuffer()[bytePos++] |= value >> numBits - bitOffset
& BIT_MASK[bitOffset];
numBits -= bitOffset;
}
checkCapacityPosition(bytePos);
if (numBits == bitOffset) {
getBuffer()[bytePos] &= ~BIT_MASK[bitOffset];
getBuffer()[bytePos] |= value & BIT_MASK[bitOffset];
} else {
getBuffer()[bytePos] &= ~(BIT_MASK[numBits] << bitOffset - numBits);
getBuffer()[bytePos] |= (value & BIT_MASK[numBits]) << bitOffset
- numBits;
}
}
// Replaces the backing byte array. Note: offsets and the bit cursor are
// NOT reset here; callers are responsible for keeping them consistent.
public void setBuffer(byte[] buffer) {
    this.buffer = buffer;
}
}
| |
// ========================================================================
// Copyright (c) 2003-2009 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
package org.eclipse.jetty.security;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

import org.eclipse.jetty.http.security.Credential;
import org.eclipse.jetty.server.UserIdentity;
import org.eclipse.jetty.util.Loader;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;
import org.eclipse.jetty.util.resource.Resource;
/* ------------------------------------------------------------ */
/**
* HashMapped User Realm with JDBC as data source. JDBCLoginService extends
* HashULoginService and adds a method to fetch user information from database.
* The login() method checks the inherited Map for the user. If the user is not
* found, it will fetch details from the database and populate the inherited
* Map. It then calls the superclass login() method to perform the actual
* authentication. Periodically (controlled by configuration parameter),
* internal hashes are cleared. Caching can be disabled by setting cache refresh
* interval to zero. Uses one database connection that is initialized at
* startup. Reconnect on failures. authenticate() is 'synchronized'.
*
* An example properties file for configuration is in
* $JETTY_HOME/etc/jdbcRealm.properties
*
* @version $Id: JDBCLoginService.java 4792 2009-03-18 21:55:52Z gregw $
*
*
*
*
*/
public class JDBCLoginService extends MappedLoginService
{
    private static final Logger LOG = Log.getLogger(JDBCLoginService.class);

    /** Filename or URL of the realm properties file (see {@link #setConfig(String)}). */
    private String _config;
    private String _jdbcDriver;
    private String _url;
    private String _userName;
    private String _password;
    private String _userTableKey;
    private String _userTablePasswordField;
    private String _roleTableRoleField;
    /** Cache refresh interval: read in seconds from config, stored in ms; 0 disables caching. */
    private int _cacheTime;
    private long _lastHashPurge;
    private Connection _con;
    private String _userSql;
    private String _roleSql;

    /* ------------------------------------------------------------ */
    public JDBCLoginService()
        throws IOException
    {
    }

    /* ------------------------------------------------------------ */
    public JDBCLoginService(String name)
        throws IOException
    {
        setName(name);
    }

    /* ------------------------------------------------------------ */
    public JDBCLoginService(String name, String config)
        throws IOException
    {
        setName(name);
        setConfig(config);
    }

    /* ------------------------------------------------------------ */
    public JDBCLoginService(String name, IdentityService identityService, String config)
        throws IOException
    {
        setName(name);
        setIdentityService(identityService);
        setConfig(config);
    }

    /* ------------------------------------------------------------ */
    /**
     * Loads the JDBC configuration from the properties file, builds the
     * user and role lookup SQL, loads the driver class and then delegates
     * to the superclass.
     *
     * @see org.eclipse.jetty.security.MappedLoginService#doStart()
     */
    @Override
    protected void doStart() throws Exception
    {
        Properties properties = new Properties();
        Resource resource = Resource.newResource(_config);
        // Close the configuration stream once loaded (it was previously leaked).
        InputStream in = resource.getInputStream();
        try
        {
            properties.load(in);
        }
        finally
        {
            in.close();
        }
        _jdbcDriver = properties.getProperty("jdbcdriver");
        _url = properties.getProperty("url");
        _userName = properties.getProperty("username");
        _password = properties.getProperty("password");
        String _userTable = properties.getProperty("usertable");
        _userTableKey = properties.getProperty("usertablekey");
        String _userTableUserField = properties.getProperty("usertableuserfield");
        _userTablePasswordField = properties.getProperty("usertablepasswordfield");
        String _roleTable = properties.getProperty("roletable");
        String _roleTableKey = properties.getProperty("roletablekey");
        _roleTableRoleField = properties.getProperty("roletablerolefield");
        String _userRoleTable = properties.getProperty("userroletable");
        String _userRoleTableUserKey = properties.getProperty("userroletableuserkey");
        String _userRoleTableRoleKey = properties.getProperty("userroletablerolekey");
        // parseInt avoids the deprecated Integer(String) boxing constructor; like
        // the original code it throws NumberFormatException when "cachetime" is
        // absent or malformed.
        _cacheTime = Integer.parseInt(properties.getProperty("cachetime"));

        if (_jdbcDriver == null || _jdbcDriver.equals("")
            || _url == null
            || _url.equals("")
            || _userName == null
            || _userName.equals("")
            || _password == null
            || _cacheTime < 0)
        {
            LOG.warn("UserRealm " + getName() + " has not been properly configured");
        }
        _cacheTime *= 1000; // seconds -> milliseconds
        _lastHashPurge = 0;
        _userSql = "select " + _userTableKey + "," + _userTablePasswordField + " from " + _userTable + " where " + _userTableUserField + " = ?";
        _roleSql = "select r." + _roleTableRoleField
                   + " from "
                   + _roleTable
                   + " r, "
                   + _userRoleTable
                   + " u where u."
                   + _userRoleTableUserKey
                   + " = ?"
                   + " and r."
                   + _roleTableKey
                   + " = u."
                   + _userRoleTableRoleKey;
        Loader.loadClass(this.getClass(), _jdbcDriver).newInstance();
        super.doStart();
    }

    /* ------------------------------------------------------------ */
    /** @return the config filename or URL */
    public String getConfig()
    {
        return _config;
    }

    /* ------------------------------------------------------------ */
    /**
     * Load JDBC connection configuration from properties file.
     *
     * @param config Filename or url of user properties file.
     * @throws IllegalStateException if the service is already running
     */
    public void setConfig(String config)
    {
        if (isRunning())
            throw new IllegalStateException("Running");
        _config = config;
    }

    /* ------------------------------------------------------------ */
    /**
     * (re)Connect to database with parameters setup by loadConfig().
     * On failure the connection is left null and a warning is logged;
     * the next login attempt will retry.
     */
    public void connectDatabase()
    {
        try
        {
            Class.forName(_jdbcDriver);
            _con = DriverManager.getConnection(_url, _userName, _password);
        }
        catch (SQLException e)
        {
            LOG.warn("UserRealm " + getName() + " could not connect to database; will try later", e);
        }
        catch (ClassNotFoundException e)
        {
            LOG.warn("UserRealm " + getName() + " could not connect to database; will try later", e);
        }
    }

    /* ------------------------------------------------------------ */
    /**
     * Purges the cached user map (and drops the DB connection) whenever the
     * cache interval has elapsed or caching is disabled, then delegates the
     * actual authentication to the superclass.
     */
    @Override
    public UserIdentity login(String username, Object credentials)
    {
        long now = System.currentTimeMillis();
        if (now - _lastHashPurge > _cacheTime || _cacheTime == 0)
        {
            _users.clear();
            _lastHashPurge = now;
            closeConnection();
        }
        return super.login(username, credentials);
    }

    /* ------------------------------------------------------------ */
    /** Users are loaded lazily by {@link #loadUser(String)}, so bulk loading is a no-op. */
    @Override
    protected void loadUsers()
    {
    }

    /* ------------------------------------------------------------ */
    /**
     * Fetches a single user's credential and role list from the database
     * and caches it via putUser(). Statements and result sets are closed
     * in a finally block so they no longer leak on exceptions or on the
     * normal return path (the original never closed the ResultSets).
     *
     * @param username the user to look up
     * @return the cached UserIdentity, or null if not found or on DB error
     */
    @Override
    protected UserIdentity loadUser(String username)
    {
        PreparedStatement stat = null;
        ResultSet rs = null;
        try
        {
            if (null == _con)
                connectDatabase();
            if (null == _con)
                throw new SQLException("Can't connect to database");

            stat = _con.prepareStatement(_userSql);
            stat.setObject(1, username);
            rs = stat.executeQuery();
            if (rs.next())
            {
                int key = rs.getInt(_userTableKey);
                String credentials = rs.getString(_userTablePasswordField);
                rs.close();
                stat.close();

                // Second query: collect the user's roles.
                stat = _con.prepareStatement(_roleSql);
                stat.setInt(1, key);
                rs = stat.executeQuery();
                List<String> roles = new ArrayList<String>();
                while (rs.next())
                    roles.add(rs.getString(_roleTableRoleField));
                return putUser(username, Credential.getCredential(credentials), roles.toArray(new String[roles.size()]));
            }
        }
        catch (SQLException e)
        {
            LOG.warn("UserRealm " + getName() + " could not load user information from database", e);
            closeConnection();
        }
        finally
        {
            // Best-effort cleanup; closing an already-closed resource is a no-op.
            if (rs != null) try { rs.close(); } catch (SQLException e) { LOG.ignore(e); }
            if (stat != null) try { stat.close(); } catch (SQLException e) { LOG.ignore(e); }
        }
        return null;
    }

    /**
     * Close an existing connection (best effort; errors are ignored).
     */
    private void closeConnection()
    {
        if (_con != null)
        {
            if (LOG.isDebugEnabled()) LOG.debug("Closing db connection for JDBCUserRealm");
            try { _con.close(); } catch (Exception e) { LOG.ignore(e); }
        }
        _con = null;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.parquet.hadoop;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.example.data.simple.SimpleGroupFactory;
import org.apache.parquet.format.converter.ParquetMetadataConverter;
import org.apache.parquet.hadoop.ParquetOutputFormat.JobSummaryLevel;
import org.apache.parquet.hadoop.example.ExampleParquetWriter;
import org.apache.parquet.hadoop.example.GroupWriteSupport;
import org.apache.parquet.hadoop.metadata.ParquetMetadata;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.schema.MessageType;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import static org.apache.parquet.schema.MessageTypeParser.parseMessageType;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
 * Tests {@code ParquetFileWriter.writeMergedMetadataFile}: merging the
 * summary metadata files of two separately written output directories must
 * succeed when their schemas and key/value metadata agree, and must fail
 * with a descriptive exception when the key/value metadata conflicts.
 */
public class TestMergeMetadataFiles {
  @Rule
  public TemporaryFolder temp = new TemporaryFolder();

  // Full schema ("schema 1") containing every supported primitive used here.
  private static final MessageType schema = parseMessageType(
      "message test { "
          + "required binary binary_field; "
          + "required int32 int32_field; "
          + "required int64 int64_field; "
          + "required boolean boolean_field; "
          + "required float float_field; "
          + "required double double_field; "
          + "required fixed_len_byte_array(3) flba_field; "
          + "required int96 int96_field; "
          + "} ");
  // schema1 with a field removed
  private static final MessageType schema2 = parseMessageType(
      "message test { "
          + "required binary binary_field; "
          + "required int32 int32_field; "
          + "required int64 int64_field; "
          + "required boolean boolean_field; "
          + "required float float_field; "
          + "required double double_field; "
          + "required fixed_len_byte_array(3) flba_field; "
          + "} ");

  // Writes 1000 rows to `out` using schema or schema2 (per useSchema2),
  // tagging the file's extra metadata with schema_num = "1" or "2" so the
  // merge-conflict test has a key to collide on.
  private static void writeFile(File out, Configuration conf, boolean useSchema2) throws IOException {
    if (!useSchema2) {
      GroupWriteSupport.setSchema(schema, conf);
    } else {
      GroupWriteSupport.setSchema(schema2, conf);
    }
    // The factory is built on the full schema; schema2 rows simply omit int96_field.
    SimpleGroupFactory f = new SimpleGroupFactory(schema);
    Map<String, String> extraMetaData = new HashMap<String, String>();
    extraMetaData.put("schema_num", useSchema2 ? "2" : "1" );
    ParquetWriter<Group> writer = ExampleParquetWriter
        .builder(new Path(out.getAbsolutePath()))
        .withConf(conf)
        .withExtraMetaData(extraMetaData)
        .build();
    for (int i = 0; i < 1000; i++) {
      Group g = f.newGroup()
          .append("binary_field", "test" + i)
          .append("int32_field", i)
          .append("int64_field", (long) i)
          .append("boolean_field", i % 2 == 0)
          .append("float_field", (float) i)
          .append("double_field", (double)i)
          .append("flba_field", "foo");
      if (!useSchema2) {
        g = g.append("int96_field", Binary.fromConstantByteArray(new byte[12]));
      }
      writer.write(g);
    }
    writer.close();
  }

  // Holder for the paths of the _metadata / _common_metadata files written
  // by writeFiles(), plus the Configuration used to write them.
  private static class WrittenFileInfo {
    public Configuration conf;
    public Path metaPath1;
    public Path metaPath2;
    public Path commonMetaPath1;
    public Path commonMetaPath2;
  }

  // Writes two output directories (10 and 7 files) plus their summary
  // metadata files. Directory 1 always uses schema2 (schema_num "2");
  // directory 2 uses schema2 as well when mixedSchemas is false, or the
  // full schema (schema_num "1") when mixedSchemas is true, which makes
  // the two summaries conflict.
  private WrittenFileInfo writeFiles(boolean mixedSchemas) throws Exception {
    WrittenFileInfo info = new WrittenFileInfo();
    Configuration conf = new Configuration();
    info.conf = conf;
    File root1 = new File(temp.getRoot(), "out1");
    File root2 = new File(temp.getRoot(), "out2");
    Path rootPath1 = new Path(root1.getAbsolutePath());
    Path rootPath2 = new Path(root2.getAbsolutePath());
    for (int i = 0; i < 10; i++) {
      writeFile(new File(root1, i + ".parquet"), conf, true);
    }
    List<Footer> footers = ParquetFileReader.readFooters(conf, rootPath1.getFileSystem(conf).getFileStatus(rootPath1), false);
    ParquetFileWriter.writeMetadataFile(conf, rootPath1, footers, JobSummaryLevel.ALL);
    for (int i = 0; i < 7; i++) {
      writeFile(new File(root2, i + ".parquet"), conf, !mixedSchemas);
    }
    footers = ParquetFileReader.readFooters(conf, rootPath2.getFileSystem(conf).getFileStatus(rootPath2), false);
    ParquetFileWriter.writeMetadataFile(conf, rootPath2, footers, JobSummaryLevel.ALL);
    info.commonMetaPath1 = new Path(new File(root1, ParquetFileWriter.PARQUET_COMMON_METADATA_FILE).getAbsolutePath());
    info.commonMetaPath2 = new Path(new File(root2, ParquetFileWriter.PARQUET_COMMON_METADATA_FILE).getAbsolutePath());
    info.metaPath1 = new Path(new File(root1, ParquetFileWriter.PARQUET_METADATA_FILE).getAbsolutePath());
    info.metaPath2 = new Path(new File(root2, ParquetFileWriter.PARQUET_METADATA_FILE).getAbsolutePath());
    return info;
  }

  // Happy path: identical schemas and key/value metadata merge cleanly,
  // block lists concatenate, and _common_metadata stays block-free.
  @Test
  public void testMergeMetadataFiles() throws Exception {
    WrittenFileInfo info = writeFiles(false);
    ParquetMetadata commonMeta1 = ParquetFileReader.readFooter(info.conf, info.commonMetaPath1, ParquetMetadataConverter.NO_FILTER);
    ParquetMetadata commonMeta2 = ParquetFileReader.readFooter(info.conf, info.commonMetaPath2, ParquetMetadataConverter.NO_FILTER);
    ParquetMetadata meta1 = ParquetFileReader.readFooter(info.conf, info.metaPath1, ParquetMetadataConverter.NO_FILTER);
    ParquetMetadata meta2 = ParquetFileReader.readFooter(info.conf, info.metaPath2, ParquetMetadataConverter.NO_FILTER);
    // Sanity-check the pre-merge inputs first.
    assertTrue(commonMeta1.getBlocks().isEmpty());
    assertTrue(commonMeta2.getBlocks().isEmpty());
    assertEquals(commonMeta1.getFileMetaData().getSchema(), commonMeta2.getFileMetaData().getSchema());
    assertFalse(meta1.getBlocks().isEmpty());
    assertFalse(meta2.getBlocks().isEmpty());
    assertEquals(meta1.getFileMetaData().getSchema(), meta2.getFileMetaData().getSchema());
    assertEquals(commonMeta1.getFileMetaData().getKeyValueMetaData(), commonMeta2.getFileMetaData().getKeyValueMetaData());
    assertEquals(meta1.getFileMetaData().getKeyValueMetaData(), meta2.getFileMetaData().getKeyValueMetaData());
    // test file serialization
    Path mergedOut = new Path(new File(temp.getRoot(), "merged_meta").getAbsolutePath());
    Path mergedCommonOut = new Path(new File(temp.getRoot(), "merged_common_meta").getAbsolutePath());
    ParquetFileWriter.writeMergedMetadataFile(Arrays.asList(info.metaPath1, info.metaPath2), mergedOut, info.conf);
    ParquetFileWriter.writeMergedMetadataFile(Arrays.asList(info.commonMetaPath1, info.commonMetaPath2), mergedCommonOut, info.conf);
    ParquetMetadata mergedMeta = ParquetFileReader.readFooter(info.conf, mergedOut, ParquetMetadataConverter.NO_FILTER);
    ParquetMetadata mergedCommonMeta = ParquetFileReader.readFooter(info.conf, mergedCommonOut, ParquetMetadataConverter.NO_FILTER);
    // ideally we'd assert equality here, but BlockMetaData and it's references don't implement equals
    assertEquals(meta1.getBlocks().size() + meta2.getBlocks().size(), mergedMeta.getBlocks().size());
    assertTrue(mergedCommonMeta.getBlocks().isEmpty());
    assertEquals(meta1.getFileMetaData().getSchema(), mergedMeta.getFileMetaData().getSchema());
    assertEquals(commonMeta1.getFileMetaData().getSchema(), mergedCommonMeta.getFileMetaData().getSchema());
    assertEquals(meta1.getFileMetaData().getKeyValueMetaData(), mergedMeta.getFileMetaData().getKeyValueMetaData());
    assertEquals(commonMeta1.getFileMetaData().getKeyValueMetaData(), mergedCommonMeta.getFileMetaData().getKeyValueMetaData());
  }

  // Conflict path: directories tagged schema_num "2" and "1" must make the
  // merge fail with the exact conflicting-values message.
  @Test
  public void testThrowsWhenIncompatible() throws Exception {
    WrittenFileInfo info = writeFiles(true);
    Path mergedOut = new Path(new File(temp.getRoot(), "merged_meta").getAbsolutePath());
    Path mergedCommonOut = new Path(new File(temp.getRoot(), "merged_common_meta").getAbsolutePath());
    try {
      ParquetFileWriter.writeMergedMetadataFile(Arrays.asList(info.metaPath1, info.metaPath2), mergedOut, info.conf);
      fail("this should throw");
    } catch (RuntimeException e) {
      assertEquals("could not merge metadata: key schema_num has conflicting values: [2, 1]", e.getMessage());
    }
    try {
      ParquetFileWriter.writeMergedMetadataFile(Arrays.asList(info.commonMetaPath1, info.commonMetaPath2), mergedCommonOut, info.conf);
      fail("this should throw");
    } catch (RuntimeException e) {
      assertEquals("could not merge metadata: key schema_num has conflicting values: [2, 1]", e.getMessage());
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.shortcircuit;
import java.io.BufferedOutputStream;
import java.io.Closeable;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map.Entry;
import java.util.TreeMap;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.lang.mutable.MutableBoolean;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hdfs.ExtendedBlockId;
import org.apache.hadoop.hdfs.net.DomainPeer;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;
import org.apache.hadoop.hdfs.protocol.datatransfer.Sender;
import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ShortCircuitShmResponseProto;
import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.ShmId;
import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.Slot;
import org.apache.hadoop.net.unix.DomainSocket;
import org.apache.hadoop.net.unix.DomainSocketWatcher;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Manages short-circuit memory segments for an HDFS client.
*
* Clients are responsible for requesting and releasing shared memory segments used
* for communicating with the DataNode. The client will try to allocate new slots
* in the set of existing segments, falling back to getting a new segment from the
* DataNode via {@link DataTransferProtocol#requestShortCircuitFds}.
*
* The counterpart to this class on the DataNode is {@link ShortCircuitRegistry}.
* See {@link ShortCircuitRegistry} for more information on the communication protocol.
*/
@InterfaceAudience.Private
public class DfsClientShmManager implements Closeable {
private static final Logger LOG = LoggerFactory.getLogger(
DfsClientShmManager.class);
/**
* Manages short-circuit memory segments that pertain to a given DataNode.
*/
class EndpointShmManager {
/**
* The datanode we're managing.
*/
private final DatanodeInfo datanode;
/**
* Shared memory segments which have no empty slots.
*
* Protected by the manager lock.
*/
private final TreeMap<ShmId, DfsClientShm> full =
new TreeMap<ShmId, DfsClientShm>();
/**
* Shared memory segments which have at least one empty slot.
*
* Protected by the manager lock.
*/
private final TreeMap<ShmId, DfsClientShm> notFull =
new TreeMap<ShmId, DfsClientShm>();
/**
* True if this datanode doesn't support short-circuit shared memory
* segments.
*
* Protected by the manager lock.
*/
private boolean disabled = false;
/**
* True if we're in the process of loading a shared memory segment from
* this DataNode.
*
* Protected by the manager lock.
*/
private boolean loading = false;
EndpointShmManager (DatanodeInfo datanode) {
this.datanode = datanode;
}
/**
* Pull a slot out of a preexisting shared memory segment.
*
* Must be called with the manager lock held.
*
* @param blockId The blockId to put inside the Slot object.
*
* @return null if none of our shared memory segments contain a
* free slot; the slot object otherwise.
*/
private Slot allocSlotFromExistingShm(ExtendedBlockId blockId) {
if (notFull.isEmpty()) {
return null;
}
Entry<ShmId, DfsClientShm> entry = notFull.firstEntry();
DfsClientShm shm = entry.getValue();
ShmId shmId = shm.getShmId();
Slot slot = shm.allocAndRegisterSlot(blockId);
if (shm.isFull()) {
if (LOG.isTraceEnabled()) {
LOG.trace(this + ": pulled the last slot " + slot.getSlotIdx() +
" out of " + shm);
}
DfsClientShm removedShm = notFull.remove(shmId);
Preconditions.checkState(removedShm == shm);
full.put(shmId, shm);
} else {
if (LOG.isTraceEnabled()) {
LOG.trace(this + ": pulled slot " + slot.getSlotIdx() +
" out of " + shm);
}
}
return slot;
}
/**
* Ask the DataNode for a new shared memory segment. This function must be
* called with the manager lock held. We will release the lock while
* communicating with the DataNode.
*
* @param clientName The current client name.
* @param peer The peer to use to talk to the DataNode.
*
* @return Null if the DataNode does not support shared memory
* segments, or experienced an error creating the
* shm. The shared memory segment itself on success.
* @throws IOException If there was an error communicating over the socket.
* We will not throw an IOException unless the socket
* itself (or the network) is the problem.
*/
private DfsClientShm requestNewShm(String clientName, DomainPeer peer)
throws IOException {
final DataOutputStream out =
new DataOutputStream(
new BufferedOutputStream(peer.getOutputStream()));
new Sender(out).requestShortCircuitShm(clientName);
ShortCircuitShmResponseProto resp =
ShortCircuitShmResponseProto.parseFrom(
PBHelperClient.vintPrefixed(peer.getInputStream()));
String error = resp.hasError() ? resp.getError() : "(unknown)";
switch (resp.getStatus()) {
case SUCCESS:
DomainSocket sock = peer.getDomainSocket();
byte buf[] = new byte[1];
FileInputStream fis[] = new FileInputStream[1];
if (sock.recvFileInputStreams(fis, buf, 0, buf.length) < 0) {
throw new EOFException("got EOF while trying to transfer the " +
"file descriptor for the shared memory segment.");
}
if (fis[0] == null) {
throw new IOException("the datanode " + datanode + " failed to " +
"pass a file descriptor for the shared memory segment.");
}
try {
DfsClientShm shm =
new DfsClientShm(PBHelperClient.convert(resp.getId()),
fis[0], this, peer);
if (LOG.isTraceEnabled()) {
LOG.trace(this + ": createNewShm: created " + shm);
}
return shm;
} finally {
try {
fis[0].close();
} catch (Throwable e) {
LOG.debug("Exception in closing " + fis[0], e);
}
}
case ERROR_UNSUPPORTED:
// The DataNode just does not support short-circuit shared memory
// access, and we should stop asking.
LOG.info(this + ": datanode does not support short-circuit " +
"shared memory access: " + error);
disabled = true;
return null;
default:
// The datanode experienced some kind of unexpected error when trying to
// create the short-circuit shared memory segment.
LOG.warn(this + ": error requesting short-circuit shared memory " +
"access: " + error);
return null;
}
}
/**
* Allocate a new shared memory slot connected to this datanode.
*
* Must be called with the EndpointShmManager lock held.
*
* @param peer The peer to use to talk to the DataNode.
* @param usedPeer (out param) Will be set to true if we used the peer.
* When a peer is used
*
* @param clientName The client name.
* @param blockId The block ID to use.
* @return null if the DataNode does not support shared memory
* segments, or experienced an error creating the
* shm. The shared memory segment itself on success.
* @throws IOException If there was an error communicating over the socket.
*/
Slot allocSlot(DomainPeer peer, MutableBoolean usedPeer,
String clientName, ExtendedBlockId blockId) throws IOException {
while (true) {
if (closed) {
if (LOG.isTraceEnabled()) {
LOG.trace(this + ": the DfsClientShmManager has been closed.");
}
return null;
}
if (disabled) {
if (LOG.isTraceEnabled()) {
LOG.trace(this + ": shared memory segment access is disabled.");
}
return null;
}
// Try to use an existing slot.
Slot slot = allocSlotFromExistingShm(blockId);
if (slot != null) {
return slot;
}
// There are no free slots. If someone is loading more slots, wait
// for that to finish.
if (loading) {
if (LOG.isTraceEnabled()) {
LOG.trace(this + ": waiting for loading to finish...");
}
finishedLoading.awaitUninterruptibly();
} else {
// Otherwise, load the slot ourselves.
loading = true;
lock.unlock();
DfsClientShm shm;
try {
shm = requestNewShm(clientName, peer);
if (shm == null) continue;
// See #{DfsClientShmManager#domainSocketWatcher} for details
// about why we do this before retaking the manager lock.
domainSocketWatcher.add(peer.getDomainSocket(), shm);
// The DomainPeer is now our responsibility, and should not be
// closed by the caller.
usedPeer.setValue(true);
} finally {
lock.lock();
loading = false;
finishedLoading.signalAll();
}
if (shm.isDisconnected()) {
// If the peer closed immediately after the shared memory segment
// was created, the DomainSocketWatcher callback might already have
// fired and marked the shm as disconnected. In this case, we
// obviously don't want to add the SharedMemorySegment to our list
// of valid not-full segments.
if (LOG.isDebugEnabled()) {
LOG.debug(this + ": the UNIX domain socket associated with " +
"this short-circuit memory closed before we could make " +
"use of the shm.");
}
} else {
notFull.put(shm.getShmId(), shm);
}
}
}
}
/**
* Stop tracking a slot.
*
* Must be called with the EndpointShmManager lock held.
*
* @param slot The slot to release.
*/
void freeSlot(Slot slot) {
DfsClientShm shm = (DfsClientShm)slot.getShm();
shm.unregisterSlot(slot.getSlotIdx());
if (shm.isDisconnected()) {
// Stale shared memory segments should not be tracked here.
Preconditions.checkState(!full.containsKey(shm.getShmId()));
Preconditions.checkState(!notFull.containsKey(shm.getShmId()));
if (shm.isEmpty()) {
if (LOG.isTraceEnabled()) {
LOG.trace(this + ": freeing empty stale " + shm);
}
shm.free();
}
} else {
ShmId shmId = shm.getShmId();
full.remove(shmId); // The shm can't be full if we just freed a slot.
if (shm.isEmpty()) {
notFull.remove(shmId);
// If the shared memory segment is now empty, we call shutdown(2) on
// the UNIX domain socket associated with it. The DomainSocketWatcher,
// which is watching this socket, will call DfsClientShm#handle,
// cleaning up this shared memory segment.
//
// See #{DfsClientShmManager#domainSocketWatcher} for details about why
// we don't want to call DomainSocketWatcher#remove directly here.
//
// Note that we could experience 'fragmentation' here, where the
// DFSClient allocates a bunch of slots in different shared memory
// segments, and then frees most of them, but never fully empties out
// any segment. We make some attempt to avoid this fragmentation by
// always allocating new slots out of the shared memory segment with the
// lowest ID, but it could still occur. In most workloads,
// fragmentation should not be a major concern, since it doesn't impact
// peak file descriptor usage or the speed of allocation.
if (LOG.isTraceEnabled()) {
LOG.trace(this + ": shutting down UNIX domain socket for " +
"empty " + shm);
}
shutdown(shm);
} else {
notFull.put(shmId, shm);
}
}
}
/**
* Unregister a shared memory segment.
*
* Once a segment is unregistered, we will not allocate any more slots
* inside that segment.
*
* The DomainSocketWatcher calls this while holding the DomainSocketWatcher
* lock.
*
* @param shmId The ID of the shared memory segment to unregister.
*/
void unregisterShm(ShmId shmId) {
lock.lock();
try {
full.remove(shmId);
notFull.remove(shmId);
} finally {
lock.unlock();
}
}
@Override
public String toString() {
return String.format("EndpointShmManager(%s, parent=%s)",
datanode, DfsClientShmManager.this);
}
PerDatanodeVisitorInfo getVisitorInfo() {
return new PerDatanodeVisitorInfo(full, notFull, disabled);
}
final void shutdown(DfsClientShm shm) {
try {
shm.getPeer().getDomainSocket().shutdown();
} catch (IOException e) {
LOG.warn(this + ": error shutting down shm: got IOException calling " +
"shutdown(SHUT_RDWR)", e);
}
}
}
private boolean closed = false;
private final ReentrantLock lock = new ReentrantLock();
/**
* A condition variable which is signalled when we finish loading a segment
* from the Datanode.
*/
private final Condition finishedLoading = lock.newCondition();
/**
* Information about each Datanode.
*/
private final HashMap<DatanodeInfo, EndpointShmManager> datanodes =
new HashMap<DatanodeInfo, EndpointShmManager>(1);
/**
* The DomainSocketWatcher which keeps track of the UNIX domain socket
* associated with each shared memory segment.
*
* Note: because the DomainSocketWatcher makes callbacks into this
* DfsClientShmManager object, you must MUST NOT attempt to take the
* DomainSocketWatcher lock while holding the DfsClientShmManager lock,
* or else deadlock might result. This means that most DomainSocketWatcher
* methods are off-limits unless you release the manager lock first.
*/
private final DomainSocketWatcher domainSocketWatcher;
DfsClientShmManager(int interruptCheckPeriodMs) throws IOException {
this.domainSocketWatcher = new DomainSocketWatcher(interruptCheckPeriodMs,
"client");
}
public Slot allocSlot(DatanodeInfo datanode, DomainPeer peer,
MutableBoolean usedPeer, ExtendedBlockId blockId,
String clientName) throws IOException {
lock.lock();
try {
if (closed) {
LOG.trace(this + ": the DfsClientShmManager isclosed.");
return null;
}
EndpointShmManager shmManager = datanodes.get(datanode);
if (shmManager == null) {
shmManager = new EndpointShmManager(datanode);
datanodes.put(datanode, shmManager);
}
return shmManager.allocSlot(peer, usedPeer, clientName, blockId);
} finally {
lock.unlock();
}
}
public void freeSlot(Slot slot) {
lock.lock();
try {
DfsClientShm shm = (DfsClientShm)slot.getShm();
shm.getEndpointShmManager().freeSlot(slot);
} finally {
lock.unlock();
}
}
@VisibleForTesting
public static class PerDatanodeVisitorInfo {
public final TreeMap<ShmId, DfsClientShm> full;
public final TreeMap<ShmId, DfsClientShm> notFull;
public final boolean disabled;
PerDatanodeVisitorInfo(TreeMap<ShmId, DfsClientShm> full,
TreeMap<ShmId, DfsClientShm> notFull, boolean disabled) {
this.full = full;
this.notFull = notFull;
this.disabled = disabled;
}
}
@VisibleForTesting
public interface Visitor {
void visit(HashMap<DatanodeInfo, PerDatanodeVisitorInfo> info)
throws IOException;
}
@VisibleForTesting
public void visit(Visitor visitor) throws IOException {
lock.lock();
try {
HashMap<DatanodeInfo, PerDatanodeVisitorInfo> info =
new HashMap<DatanodeInfo, PerDatanodeVisitorInfo>();
for (Entry<DatanodeInfo, EndpointShmManager> entry :
datanodes.entrySet()) {
info.put(entry.getKey(), entry.getValue().getVisitorInfo());
}
visitor.visit(info);
} finally {
lock.unlock();
}
}
/**
 * Close the DfsClientShmManager.  Idempotent: only the first call has any
 * effect.  The watcher is closed outside the lock.
 */
@Override
public void close() throws IOException {
  lock.lock();
  try {
    if (closed) {
      return;
    }
    closed = true;
  } finally {
    lock.unlock();
  }
  // When closed, the domainSocketWatcher will issue callbacks that mark
  // all the outstanding DfsClientShm segments as stale.
  try {
    domainSocketWatcher.close();
  } catch (Throwable t) {
    // Best-effort close: log at debug and swallow, matching the
    // original behavior.
    LOG.debug("Exception in closing " + domainSocketWatcher, t);
  }
}
@Override
public String toString() {
  // NOTE(review): prints "ShortCircuitShmManager" although the class is
  // DfsClientShmManager — presumably a historical name kept for log
  // continuity; confirm before renaming.
  final int id = System.identityHashCode(this);
  return String.format("ShortCircuitShmManager(%08x)", id);
}
@VisibleForTesting
public DomainSocketWatcher getDomainSocketWatcher() {
  // Test-only accessor for the watcher created in the constructor.
  return domainSocketWatcher;
}
}
| |
// Template Source: BaseEntity.java.tt
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
package com.microsoft.graph.models;
import com.microsoft.graph.serializer.ISerializer;
import com.microsoft.graph.serializer.IJsonBackedObject;
import com.microsoft.graph.serializer.AdditionalDataManager;
import java.util.EnumSet;
import com.microsoft.graph.http.BaseCollectionPage;
import com.microsoft.graph.models.TeamFunSettings;
import com.microsoft.graph.models.TeamGuestSettings;
import com.microsoft.graph.models.TeamMemberSettings;
import com.microsoft.graph.models.TeamMessagingSettings;
import com.microsoft.graph.models.TeamSpecialization;
import com.microsoft.graph.models.TeamVisibilityType;
import com.microsoft.graph.models.Channel;
import com.microsoft.graph.models.Group;
import com.microsoft.graph.models.TeamsAppInstallation;
import com.microsoft.graph.models.ConversationMember;
import com.microsoft.graph.models.TeamsAsyncOperation;
import com.microsoft.graph.models.TeamsTemplate;
import com.microsoft.graph.models.Schedule;
import com.microsoft.graph.models.Entity;
import com.microsoft.graph.requests.ChannelCollectionPage;
import com.microsoft.graph.requests.TeamsAppInstallationCollectionPage;
import com.microsoft.graph.requests.ConversationMemberCollectionPage;
import com.microsoft.graph.requests.TeamsAsyncOperationCollectionPage;
import com.google.gson.JsonObject;
import com.google.gson.annotations.SerializedName;
import com.google.gson.annotations.Expose;
import javax.annotation.Nullable;
import javax.annotation.Nonnull;
// **NOTE** This file was generated by a tool and any changes will be overwritten.
/**
 * The class for the Team.
 *
 * Generated Microsoft Graph model.  Per the SDK's code-generation
 * conventions, every property is a public field annotated for Gson
 * (de)serialization; do not hand-edit — the file is regenerated by a tool.
 */
public class Team extends Entity implements IJsonBackedObject {
    /**
     * The Classification.
     * An optional label. Typically describes the data or business sensitivity of the team. Must match one of a pre-configured set in the tenant's directory.
     */
    @SerializedName(value = "classification", alternate = {"Classification"})
    @Expose
	@Nullable
    public String classification;
    /**
     * The Created Date Time.
     * Timestamp at which the team was created.
     */
    @SerializedName(value = "createdDateTime", alternate = {"CreatedDateTime"})
    @Expose
	@Nullable
    public java.time.OffsetDateTime createdDateTime;
    /**
     * The Description.
     * An optional description for the team. Maximum length: 1024 characters.
     */
    @SerializedName(value = "description", alternate = {"Description"})
    @Expose
	@Nullable
    public String description;
    /**
     * The Display Name.
     * The name of the team.
     */
    @SerializedName(value = "displayName", alternate = {"DisplayName"})
    @Expose
	@Nullable
    public String displayName;
    /**
     * The Fun Settings.
     * Settings to configure use of Giphy, memes, and stickers in the team.
     */
    @SerializedName(value = "funSettings", alternate = {"FunSettings"})
    @Expose
	@Nullable
    public TeamFunSettings funSettings;
    /**
     * The Guest Settings.
     * Settings to configure whether guests can create, update, or delete channels in the team.
     */
    @SerializedName(value = "guestSettings", alternate = {"GuestSettings"})
    @Expose
	@Nullable
    public TeamGuestSettings guestSettings;
    /**
     * The Internal Id.
     * A unique ID for the team that has been used in a few places such as the audit log/Office 365 Management Activity API.
     */
    @SerializedName(value = "internalId", alternate = {"InternalId"})
    @Expose
	@Nullable
    public String internalId;
    /**
     * The Is Archived.
     * Whether this team is in read-only mode.
     */
    @SerializedName(value = "isArchived", alternate = {"IsArchived"})
    @Expose
	@Nullable
    public Boolean isArchived;
    /**
     * The Member Settings.
     * Settings to configure whether members can perform certain actions, for example, create channels and add bots, in the team.
     */
    @SerializedName(value = "memberSettings", alternate = {"MemberSettings"})
    @Expose
	@Nullable
    public TeamMemberSettings memberSettings;
    /**
     * The Messaging Settings.
     * Settings to configure messaging and mentions in the team.
     */
    @SerializedName(value = "messagingSettings", alternate = {"MessagingSettings"})
    @Expose
	@Nullable
    public TeamMessagingSettings messagingSettings;
    /**
     * The Specialization.
     * Optional. Indicates whether the team is intended for a particular use case. Each team specialization has access to unique behaviors and experiences targeted to its use case.
     */
    @SerializedName(value = "specialization", alternate = {"Specialization"})
    @Expose
	@Nullable
    public TeamSpecialization specialization;
    /**
     * The Visibility.
     * The visibility of the group and team. Defaults to Public.
     */
    @SerializedName(value = "visibility", alternate = {"Visibility"})
    @Expose
	@Nullable
    public TeamVisibilityType visibility;
    /**
     * The Web Url.
     * A hyperlink that will go to the team in the Microsoft Teams client. This is the URL that you get when you right-click a team in the Microsoft Teams client and select Get link to team. This URL should be treated as an opaque blob, and not parsed.
     */
    @SerializedName(value = "webUrl", alternate = {"WebUrl"})
    @Expose
	@Nullable
    public String webUrl;
    /**
     * The Channels.
     * The collection of channels and messages associated with the team.
     */
    @SerializedName(value = "channels", alternate = {"Channels"})
    @Expose
	@Nullable
    public ChannelCollectionPage channels;
    /**
     * The Group.
     * (No description provided by the service metadata.)
     */
    @SerializedName(value = "group", alternate = {"Group"})
    @Expose
	@Nullable
    public Group group;
    /**
     * The Installed Apps.
     * The apps installed in this team.
     */
    @SerializedName(value = "installedApps", alternate = {"InstalledApps"})
    @Expose
	@Nullable
    public TeamsAppInstallationCollectionPage installedApps;
    /**
     * The Members.
     * Members and owners of the team.
     */
    @SerializedName(value = "members", alternate = {"Members"})
    @Expose
	@Nullable
    public ConversationMemberCollectionPage members;
    /**
     * The Operations.
     * The async operations that ran or are running on this team.
     */
    @SerializedName(value = "operations", alternate = {"Operations"})
    @Expose
	@Nullable
    public TeamsAsyncOperationCollectionPage operations;
    /**
     * The Primary Channel.
     * The general channel for the team.
     */
    @SerializedName(value = "primaryChannel", alternate = {"PrimaryChannel"})
    @Expose
	@Nullable
    public Channel primaryChannel;
    /**
     * The Template.
     * The template this team was created from. See available templates.
     */
    @SerializedName(value = "template", alternate = {"Template"})
    @Expose
	@Nullable
    public TeamsTemplate template;
    /**
     * The Schedule.
     * The schedule of shifts for this team.
     */
    @SerializedName(value = "schedule", alternate = {"Schedule"})
    @Expose
	@Nullable
    public Schedule schedule;
    /**
     * Sets the raw JSON object
     *
     * @param serializer the serializer
     * @param json the JSON object to set this object to
     */
    public void setRawObject(@Nonnull final ISerializer serializer, @Nonnull final JsonObject json) {
        // Collection-page navigation properties are not handled by plain Gson
        // field mapping, so each one present in the raw JSON is materialized
        // here through the SDK serializer.
        if (json.has("channels")) {
            channels = serializer.deserializeObject(json.get("channels"), ChannelCollectionPage.class);
        }
        if (json.has("installedApps")) {
            installedApps = serializer.deserializeObject(json.get("installedApps"), TeamsAppInstallationCollectionPage.class);
        }
        if (json.has("members")) {
            members = serializer.deserializeObject(json.get("members"), ConversationMemberCollectionPage.class);
        }
        if (json.has("operations")) {
            operations = serializer.deserializeObject(json.get("operations"), TeamsAsyncOperationCollectionPage.class);
        }
    }
}
| |
import static org.fest.assertions.Assertions.assertThat;
import static play.test.Helpers.fakeApplication;
import static play.test.Helpers.running;
import static play.test.Helpers.inMemoryDatabase;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import models.FieldUrl;
import models.Target;
import org.junit.Before;
import org.junit.Test;
import play.Configuration;
import play.Logger;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import uk.bl.Const.ScopeType;
import uk.bl.exception.ActException;
import uk.bl.exception.WhoisException;
import uk.bl.scope.Scope;
/**
 * Tests license/NPLD-scope inheritance between targets whose URLs nest by
 * subdomain or path, running inside a fake Play application with an
 * in-memory database.
 */
public class LicenseInheritanceTest {
    /** URL whose NPLD scope and licensing is checked in the fine-grained section. */
    private String url = "http://www.bl.uk/";
    // Alternative URLs used during manual exploration:
    // private String url = "http://www.parliament.uk/";
    // private String url = "https://www.gov.uk/";
    // private String url = "http://www.somersetremembers.com/";
    private static Boolean scopeHosting;
    private static Boolean scopeDomain;
    private static Boolean scopeRegistration;
    private static Boolean scopeManual;
    private static Boolean scopeLicense;
    @Test
    public void testLicenseInheritance() {
        running(fakeApplication(inMemoryDatabase()), new Runnable() {
            /**
             * Create and save a Target with the given title, field URLs and
             * (optional) scope.
             */
            private Target addTarget(String title, String[] urls, ScopeType scope ) {
                Target t = new Target();
                t.title = title;
                t.active = true;
                t.fieldUrls = new ArrayList<FieldUrl>();
                for( String nurl : urls ) {
                    try {
                        t.fieldUrls.add(new FieldUrl(nurl));
                    } catch (ActException e) {
                        throw(new RuntimeException(e));
                    }
                }
                if( scope != null )
                    t.scope = scope.name();
                // And save it:
                t.save();
                // And also return:
                return t;
            }
            @Override
            public void run() {
                /***************** Add some test data ******************/
                // Clear out any existing data from other tests:
                for( Target t : Target.findAll() ) {
                    t.delete();
                }
                // Add some particular targets:
                Target bl = this.addTarget("British Library", new String[]{ "http://www.bl.uk" }, ScopeType.subdomains);
                Target bln = this.addTarget("British Library News", new String[]{ "http://www.bl.uk/news/" }, ScopeType.subdomains);
                Target bld = this.addTarget("British Library Datasets", new String[]{ "http://data.bl.uk/" }, ScopeType.root);
                Target eg = this.addTarget("Example", new String[]{ "http://example.com/" }, ScopeType.subdomains);
                Target egs = this.addTarget("Example Subdomain", new String[]{ "http://subdomain.example.com/" }, ScopeType.root);
                Target egss = this.addTarget("Example Subsection", new String[]{ "http://example.com/subsection/" }, ScopeType.root);
                Target egsss = this.addTarget("Example Subsubsection", new String[]{ "http://example.com/subsection/subsubsection/" }, ScopeType.root);
                /***************** Perform some basic tests ******************/
                assertThat(eg.isInScopeAllOrInheritedWithoutLicense()).isFalse();
                assertThat(egs.isInScopeAllOrInheritedWithoutLicense()).isFalse();
                assertThat(egss.isInScopeAllOrInheritedWithoutLicense()).isFalse();
                eg.setProfessionalJudgement(true);
                // If it was smarter, it would check that a professional reason was required.
                //assertThat(eg.isInScopeAllWithoutLicense()).isFalse();
                //assertThat(egs.isInScopeAllWithoutLicense()).isFalse();
                //eg.setProfessionalJudgementExp("Because I say so!");
                assertThat(eg.isInScopeAllOrInheritedWithoutLicense()).isTrue();
                // This won't pick up yet:
                assertThat(egs.isInScopeAllOrInheritedWithoutLicense()).isFalse();
                assertThat(egss.isInScopeAllOrInheritedWithoutLicense()).isFalse();
                eg.save();
                egs.clearOverallLicenseStatusCache();
                egss.clearOverallLicenseStatusCache();
                // Now it should pick up the subdomain inheritance:
                assertThat(egs.isInScopeAllOrInheritedWithoutLicense()).isTrue();
                assertThat(egss.isInScopeAllOrInheritedWithoutLicense()).isTrue();
                // Now switch to root scope and check the inheritance is lost:
                eg.scope = ScopeType.root.name();
                eg.save();
                egs.clearOverallLicenseStatusCache();
                egss.clearOverallLicenseStatusCache();
                assertThat(egs.isInScopeAllOrInheritedWithoutLicense()).isFalse();
                assertThat(egss.isInScopeAllOrInheritedWithoutLicense()).isTrue();
                // Now check path inheritance:
                eg.setProfessionalJudgement(false);
                eg.save();
                egss.setProfessionalJudgement(true);
                egss.save();
                egs.clearOverallLicenseStatusCache();
                egss.clearOverallLicenseStatusCache();
                // (Removed a duplicated assertion here: the egs check was
                // asserted twice in a row.)
                assertThat(egs.isInScopeAllOrInheritedWithoutLicense()).isFalse();
                assertThat(egss.isInScopeAllOrInheritedWithoutLicense()).isTrue();
                assertThat(egsss.isInScopeAllOrInheritedWithoutLicense()).isTrue();
                Logger.info("More fine-grained tests...");
                /*****************Checking the NPLD scopes & Licensing of a given URL******************/
                Target target = new Target();
                target.fieldUrls = new ArrayList<FieldUrl>();
                try {
                    target.fieldUrls.add(new FieldUrl(url));
                } catch (ActException e1) {
                    Logger.error("Exception when creating test target with "+url);
                }
                scopeHosting = target.isUkHosting();
                scopeDomain = target.isTopLevelDomain();
                try {
                    scopeRegistration = target.isUkRegistration();
                } catch (WhoisException e) {
                    throw(new RuntimeException(e));
                }
                scopeManual = target.checkManualScope();
                scopeLicense = target.checkLicense();
                // scopeLicense = target.indicateUkwaLicenceStatus();
                // scopeLicense = target.indicateLicenses();
                // scopeLicense = target.hasLicenses();
                Logger.info("Scopes and Licensing::::::::::::::: " + target.fieldUrls + " - " + scopeHosting+ " - " + scopeDomain+ " - " + scopeRegistration+ " - " + scopeManual+ " - " +scopeLicense);
                /***********Fetch the child URLs**************************/
                List<Target> list = Target.filterUrl(url);
                Logger.info("Number of child URLs::::::::::::::: " + list.size());
                /****************Check child url scopes if the parent url is in scope*****************/
                if(scopeHosting || scopeDomain || scopeRegistration || scopeManual){
                    // Locals renamed so they no longer shadow the static
                    // scope* fields above.
                    Boolean childHosting = null;
                    Boolean childDomain = null;
                    Boolean childRegistration = null;
                    Boolean childManual = null;
                    for(int i=0; i<list.size(); i++){
                        Logger.info("Looking at: "+list.get(i).title);
                        childHosting = list.get(i).isUkHosting();
                        childDomain = list.get(i).isTopLevelDomain();
                        try {
                            childRegistration = list.get(i).isUkRegistration();
                        } catch (WhoisException e) {
                            // Fail loudly, consistent with the parent-URL
                            // WhoisException handling above (was a
                            // printStackTrace-and-continue stub).
                            throw(new RuntimeException(e));
                        }
                        childManual = list.get(i).checkManualScope();
                        Logger.info("In NPLD scope:URLs:::::::::: " + list.get(i).fieldUrls.get(0).url + " - " + childHosting+ " - " + childDomain+ " - " + childRegistration+ " - " + childManual);
                        assertThat(childHosting || childDomain || childRegistration || childManual).isEqualTo(true);
                    }
                }
                /****************Check child url licensing if the parent url have license*****************/
                if(scopeLicense){
                    Boolean childLicense = null;
                    for(int i=0; i<list.size(); i++){
                        childLicense = list.get(i).checkLicense();
                        Logger.info("In Licensing:URLs:::::::::: " + list.get(i).fieldUrls.get(0).url + " - " + childLicense);
                        assertThat(childLicense).isEqualTo(true);
                    }
                }
            }
        });
    }
    // Test  -- NOTE(review): @Test annotation deliberately commented out;
    // this whois check is disabled (it needs external network access).
    public void testWhois() throws ActException {
        String url = "http://bl.uk/";
        Scope.WHOIS_ENABLED = true;
        boolean wr = Scope.INSTANCE.checkWhois(url, null);
        assertThat(wr).isTrue();
        // And clean up:
        Scope.WHOIS_ENABLED = false;
    }
}
| |
package com.github.clans.fab;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.ColorStateList;
import android.graphics.Canvas;
import android.graphics.ColorFilter;
import android.graphics.Outline;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.RectF;
import android.graphics.Xfermode;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.LayerDrawable;
import android.graphics.drawable.RippleDrawable;
import android.graphics.drawable.ShapeDrawable;
import android.graphics.drawable.StateListDrawable;
import android.graphics.drawable.shapes.RoundRectShape;
import android.os.Build;
import android.util.AttributeSet;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewOutlineProvider;
import android.view.animation.Animation;
import android.widget.TextView;
/**
 * A TextView used as the label shown next to a {@link FloatingActionButton},
 * drawing its own soft shadow and pressed/ripple background, and forwarding
 * touch events to the associated FAB.
 */
public class Label extends TextView {
    private static final Xfermode PORTER_DUFF_CLEAR = new PorterDuffXfermode(PorterDuff.Mode.CLEAR);
    private int mShadowRadius;
    private int mShadowXOffset;
    private int mShadowYOffset;
    private int mShadowColor;
    private Drawable mBackgroundDrawable;
    private boolean mShowShadow = true;
    // Measured size of the text view before shadow padding is added.
    private int mRawWidth;
    private int mRawHeight;
    private int mColorNormal;
    private int mColorPressed;
    private int mColorRipple;
    private int mCornerRadius;
    private FloatingActionButton mFab;
    private Animation mShowAnimation;
    private Animation mHideAnimation;
    private boolean mUsingStyle;
    private boolean mHandleVisibilityChanges = true;
    public Label(Context context) {
        super(context);
    }
    public Label(Context context, AttributeSet attrs) {
        super(context, attrs);
    }
    public Label(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
    }
    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        // Enlarge the measured size so the shadow is not clipped.
        setMeasuredDimension(calculateMeasuredWidth(), calculateMeasuredHeight());
    }
    private int calculateMeasuredWidth() {
        if (mRawWidth == 0) {
            mRawWidth = getMeasuredWidth();
        }
        return getMeasuredWidth() + calculateShadowWidth();
    }
    private int calculateMeasuredHeight() {
        if (mRawHeight == 0) {
            mRawHeight = getMeasuredHeight();
        }
        return getMeasuredHeight() + calculateShadowHeight();
    }
    /** Extra horizontal space needed for the shadow, 0 when shadows are off. */
    int calculateShadowWidth() {
        return mShowShadow ? (mShadowRadius + Math.abs(mShadowXOffset)) : 0;
    }
    /** Extra vertical space needed for the shadow, 0 when shadows are off. */
    int calculateShadowHeight() {
        return mShowShadow ? (mShadowRadius + Math.abs(mShadowYOffset)) : 0;
    }
    /**
     * Rebuild the background: a layered drawable of [shadow, fill] when
     * shadows are enabled, otherwise just the fill.
     */
    void updateBackground() {
        LayerDrawable layerDrawable;
        if (mShowShadow) {
            layerDrawable = new LayerDrawable(new Drawable[]{
                    new Shadow(),
                    createFillDrawable()
            });
            // Inset the fill so the shadow shows around it.
            int leftInset = mShadowRadius + Math.abs(mShadowXOffset);
            int topInset = mShadowRadius + Math.abs(mShadowYOffset);
            int rightInset = (mShadowRadius + Math.abs(mShadowXOffset));
            int bottomInset = (mShadowRadius + Math.abs(mShadowYOffset));
            layerDrawable.setLayerInset(
                    1,
                    leftInset,
                    topInset,
                    rightInset,
                    bottomInset
            );
        } else {
            layerDrawable = new LayerDrawable(new Drawable[]{
                    createFillDrawable()
            });
        }
        setBackgroundCompat(layerDrawable);
    }
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    private Drawable createFillDrawable() {
        StateListDrawable drawable = new StateListDrawable();
        drawable.addState(new int[]{android.R.attr.state_pressed}, createRectDrawable(mColorPressed));
        drawable.addState(new int[]{}, createRectDrawable(mColorNormal));
        if (Util.hasLollipop()) {
            RippleDrawable ripple = new RippleDrawable(new ColorStateList(new int[][]{{}},
                    new int[]{mColorRipple}), drawable, null);
            // NOTE(review): the outline is an oval although the label draws
            // rounded rectangles — presumably inherited from the FAB's
            // provider; confirm before changing.
            setOutlineProvider(new ViewOutlineProvider() {
                @Override
                public void getOutline(View view, Outline outline) {
                    outline.setOval(0, 0, view.getWidth(), view.getHeight());
                }
            });
            setClipToOutline(true);
            mBackgroundDrawable = ripple;
            return ripple;
        }
        mBackgroundDrawable = drawable;
        return drawable;
    }
    /** Build a rounded-rect fill drawable in the given color. */
    private Drawable createRectDrawable(int color) {
        RoundRectShape shape = new RoundRectShape(
                new float[]{
                        mCornerRadius,
                        mCornerRadius,
                        mCornerRadius,
                        mCornerRadius,
                        mCornerRadius,
                        mCornerRadius,
                        mCornerRadius,
                        mCornerRadius
                },
                null,
                null);
        ShapeDrawable shapeDrawable = new ShapeDrawable(shape);
        shapeDrawable.getPaint().setColor(color);
        return shapeDrawable;
    }
    /** Copy the shadow parameters from the associated FAB. */
    private void setShadow(FloatingActionButton fab) {
        mShadowColor = fab.getShadowColor();
        mShadowRadius = fab.getShadowRadius();
        mShadowXOffset = fab.getShadowXOffset();
        mShadowYOffset = fab.getShadowYOffset();
        mShowShadow = fab.hasShadow();
    }
    @SuppressWarnings("deprecation")
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    private void setBackgroundCompat(Drawable drawable) {
        if (Util.hasJellyBean()) {
            setBackground(drawable);
        } else {
            setBackgroundDrawable(drawable);
        }
    }
    private void playShowAnimation() {
        if (mShowAnimation != null) {
            // Bug fix: guard against NPE when only a show animation was set.
            if (mHideAnimation != null) {
                mHideAnimation.cancel();
            }
            startAnimation(mShowAnimation);
        }
    }
    private void playHideAnimation() {
        if (mHideAnimation != null) {
            // Bug fix: guard against NPE when only a hide animation was set.
            if (mShowAnimation != null) {
                mShowAnimation.cancel();
            }
            startAnimation(mHideAnimation);
        }
    }
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    void onActionDown() {
        if (mUsingStyle) {
            mBackgroundDrawable = getBackground();
        }
        if (mBackgroundDrawable instanceof StateListDrawable) {
            StateListDrawable drawable = (StateListDrawable) mBackgroundDrawable;
            drawable.setState(new int[]{android.R.attr.state_pressed});
        } else if (Util.hasLollipop() && mBackgroundDrawable instanceof RippleDrawable) {
            RippleDrawable ripple = (RippleDrawable) mBackgroundDrawable;
            ripple.setState(new int[]{android.R.attr.state_enabled, android.R.attr.state_pressed});
            ripple.setHotspot(getMeasuredWidth() / 2, getMeasuredHeight() / 2);
            ripple.setVisible(true, true);
        }
    }
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    void onActionUp() {
        if (mUsingStyle) {
            mBackgroundDrawable = getBackground();
        }
        if (mBackgroundDrawable instanceof StateListDrawable) {
            StateListDrawable drawable = (StateListDrawable) mBackgroundDrawable;
            drawable.setState(new int[]{});
        } else if (Util.hasLollipop() && mBackgroundDrawable instanceof RippleDrawable) {
            RippleDrawable ripple = (RippleDrawable) mBackgroundDrawable;
            ripple.setState(new int[]{});
            ripple.setHotspot(getMeasuredWidth() / 2, getMeasuredHeight() / 2);
            ripple.setVisible(true, true);
        }
    }
    void setFab(FloatingActionButton fab) {
        mFab = fab;
        setShadow(fab);
    }
    void setShowShadow(boolean show) {
        mShowShadow = show;
    }
    void setCornerRadius(int cornerRadius) {
        mCornerRadius = cornerRadius;
    }
    void setColors(int colorNormal, int colorPressed, int colorRipple) {
        mColorNormal = colorNormal;
        mColorPressed = colorPressed;
        mColorRipple = colorRipple;
    }
    void show(boolean animate) {
        if (animate) {
            playShowAnimation();
        }
        setVisibility(VISIBLE);
    }
    void hide(boolean animate) {
        if (animate) {
            playHideAnimation();
        }
        setVisibility(INVISIBLE);
    }
    void setShowAnimation(Animation showAnimation) {
        mShowAnimation = showAnimation;
    }
    void setHideAnimation(Animation hideAnimation) {
        mHideAnimation = hideAnimation;
    }
    void setUsingStyle(boolean usingStyle) {
        mUsingStyle = usingStyle;
    }
    void setHandleVisibilityChanges(boolean handle) {
        mHandleVisibilityChanges = handle;
    }
    boolean isHandleVisibilityChanges() {
        return mHandleVisibilityChanges;
    }
    @Override
    public boolean onTouchEvent(MotionEvent event) {
        // Only intercept when the FAB is clickable; otherwise defer entirely
        // to the TextView behavior.
        if (mFab == null || mFab.getOnClickListener() == null || !mFab.isEnabled()) {
            return super.onTouchEvent(event);
        }
        int action = event.getAction();
        switch (action) {
            case MotionEvent.ACTION_UP:
                onActionUp();
                mFab.onActionUp();
                break;
        }
        mGestureDetector.onTouchEvent(event);
        return super.onTouchEvent(event);
    }
    GestureDetector mGestureDetector = new GestureDetector(getContext(), new GestureDetector.SimpleOnGestureListener() {
        @Override
        public boolean onDown(MotionEvent e) {
            onActionDown();
            if (mFab != null) {
                mFab.onActionDown();
            }
            return super.onDown(e);
        }
        @Override
        public boolean onSingleTapUp(MotionEvent e) {
            onActionUp();
            if (mFab != null) {
                mFab.onActionUp();
            }
            return super.onSingleTapUp(e);
        }
    });
    /**
     * Drawable that paints the label's soft shadow: a rounded rect with a
     * shadow layer, then the rect itself erased via PorterDuff.CLEAR so only
     * the shadow remains.
     */
    private class Shadow extends Drawable {
        private Paint mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        private Paint mErase = new Paint(Paint.ANTI_ALIAS_FLAG);
        private Shadow() {
            this.init();
        }
        private void init() {
            if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.HONEYCOMB) {
                // Shadow layers require software rendering on the view.
                setLayerType(LAYER_TYPE_SOFTWARE, null);
            }
            mPaint.setStyle(Paint.Style.FILL);
            mPaint.setColor(mColorNormal);
            mErase.setXfermode(PORTER_DUFF_CLEAR);
            if (!isInEditMode()) {
                mPaint.setShadowLayer(mShadowRadius, mShadowXOffset, mShadowYOffset, mShadowColor);
            }
        }
        @Override
        public void draw(Canvas canvas) {
            RectF shadowRect = new RectF(
                    mShadowRadius + Math.abs(mShadowXOffset),
                    mShadowRadius + Math.abs(mShadowYOffset),
                    mRawWidth,
                    mRawHeight
            );
            canvas.drawRoundRect(shadowRect, mCornerRadius, mCornerRadius, mPaint);
            canvas.drawRoundRect(shadowRect, mCornerRadius, mCornerRadius, mErase);
        }
        @Override
        public void setAlpha(int alpha) {
        }
        @Override
        public void setColorFilter(ColorFilter cf) {
        }
        @Override
        public int getOpacity() {
            return 0;
        }
    }
}
| |
package org.apache.ddlutils;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Changes for GemFireXD distributed data platform (some marked by "GemStone changes")
*
* Portions Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import javax.sql.DataSource;
import org.apache.ddlutils.platform.axion.AxionPlatform;
import org.apache.ddlutils.platform.cloudscape.CloudscapePlatform;
import org.apache.ddlutils.platform.db2.Db2Platform;
import org.apache.ddlutils.platform.derby.DerbyPlatform;
import org.apache.ddlutils.platform.gemfirexd.GemFireXDPeerPlatform;
import org.apache.ddlutils.platform.gemfirexd.GemFireXDPlatform;
import org.apache.ddlutils.platform.firebird.FirebirdPlatform;
import org.apache.ddlutils.platform.hsqldb.HsqlDbPlatform;
import org.apache.ddlutils.platform.interbase.InterbasePlatform;
import org.apache.ddlutils.platform.mckoi.MckoiPlatform;
import org.apache.ddlutils.platform.mssql.MSSqlPlatform;
import org.apache.ddlutils.platform.mysql.MySqlPlatform;
import org.apache.ddlutils.platform.oracle.Oracle8Platform;
import org.apache.ddlutils.platform.postgresql.PostgreSqlPlatform;
import org.apache.ddlutils.platform.sapdb.SapDbPlatform;
import org.apache.ddlutils.platform.sybase.SybasePlatform;
/**
* Utility functions for dealing with database platforms.
*
* @version $Revision: 279421 $
*/
public class PlatformUtils
{
// Extended drivers that support more than one database
/** The DataDirect Connect DB2 jdbc driver. */
public static final String JDBC_DRIVER_DATADIRECT_DB2 = "com.ddtek.jdbc.db2.DB2Driver";
/** The DataDirect Connect SQLServer jdbc driver. */
public static final String JDBC_DRIVER_DATADIRECT_SQLSERVER = "com.ddtek.jdbc.sqlserver.SQLServerDriver";
/** The DataDirect Connect Oracle jdbc driver. */
public static final String JDBC_DRIVER_DATADIRECT_ORACLE = "com.ddtek.jdbc.oracle.OracleDriver";
/** The DataDirect Connect Sybase jdbc driver. */
public static final String JDBC_DRIVER_DATADIRECT_SYBASE = "com.ddtek.jdbc.sybase.SybaseDriver";
/** The i-net DB2 jdbc driver. */
public static final String JDBC_DRIVER_INET_DB2 = "com.inet.drda.DRDADriver";
/** The i-net Oracle jdbc driver. */
public static final String JDBC_DRIVER_INET_ORACLE = "com.inet.ora.OraDriver";
/** The i-net SQLServer jdbc driver. */
public static final String JDBC_DRIVER_INET_SQLSERVER = "com.inet.tds.TdsDriver";
/** The i-net Sybase jdbc driver. */
public static final String JDBC_DRIVER_INET_SYBASE = "com.inet.syb.SybDriver";
/** The i-net pooled jdbc driver for SQLServer and Sybase. */
public static final String JDBC_DRIVER_INET_POOLED = "com.inet.pool.PoolDriver";
/** The JNetDirect SQLServer jdbc driver. */
public static final String JDBC_DRIVER_JSQLCONNECT_SQLSERVER = "com.jnetdirect.jsql.JSQLDriver";
/** The jTDS jdbc driver for SQLServer and Sybase. */
public static final String JDBC_DRIVER_JTDS = "net.sourceforge.jtds.jdbc.Driver";
/** The subprotocol used by the DataDirect DB2 driver. */
public static final String JDBC_SUBPROTOCOL_DATADIRECT_DB2 = "datadirect:db2";
/** The subprotocol used by the DataDirect SQLServer driver. */
public static final String JDBC_SUBPROTOCOL_DATADIRECT_SQLSERVER = "datadirect:sqlserver";
/** The subprotocol used by the DataDirect Oracle driver. */
public static final String JDBC_SUBPROTOCOL_DATADIRECT_ORACLE = "datadirect:oracle";
/** The subprotocol used by the DataDirect Sybase driver. */
public static final String JDBC_SUBPROTOCOL_DATADIRECT_SYBASE = "datadirect:sybase";
/** The subprotocol used by the i-net DB2 driver. */
public static final String JDBC_SUBPROTOCOL_INET_DB2 = "inetdb2";
/** The subprotocol used by the i-net Oracle driver. */
public static final String JDBC_SUBPROTOCOL_INET_ORACLE = "inetora";
/** A subprotocol used by the i-net SQLServer driver. */
public static final String JDBC_SUBPROTOCOL_INET_SQLSERVER = "inetdae";
/** A subprotocol used by the i-net SQLServer driver. */
public static final String JDBC_SUBPROTOCOL_INET_SQLSERVER6 = "inetdae6";
/** A subprotocol used by the i-net SQLServer driver. */
public static final String JDBC_SUBPROTOCOL_INET_SQLSERVER7 = "inetdae7";
/** A subprotocol used by the i-net SQLServer driver. */
public static final String JDBC_SUBPROTOCOL_INET_SQLSERVER7A = "inetdae7a";
/** A subprotocol used by the pooled i-net SQLServer driver. */
public static final String JDBC_SUBPROTOCOL_INET_SQLSERVER_POOLED_1 = "inetpool:inetdae";
/** A subprotocol used by the pooled i-net SQLServer driver. */
public static final String JDBC_SUBPROTOCOL_INET_SQLSERVER6_POOLED_1 = "inetpool:inetdae6";
/** A subprotocol used by the pooled i-net SQLServer 7 driver. */
public static final String JDBC_SUBPROTOCOL_INET_SQLSERVER7_POOLED_1 = "inetpool:inetdae7";
/** A subprotocol used by the pooled i-net SQLServer 7a driver. */
public static final String JDBC_SUBPROTOCOL_INET_SQLSERVER7A_POOLED_1 = "inetpool:inetdae7a";
/** A subprotocol used by the pooled i-net SQLServer driver ("jdbc:"-prefixed variant). */
public static final String JDBC_SUBPROTOCOL_INET_SQLSERVER_POOLED_2 = "inetpool:jdbc:inetdae";
/** A subprotocol used by the pooled i-net SQLServer 6 driver ("jdbc:"-prefixed variant). */
public static final String JDBC_SUBPROTOCOL_INET_SQLSERVER6_POOLED_2 = "inetpool:jdbc:inetdae6";
/** A subprotocol used by the pooled i-net SQLServer 7 driver ("jdbc:"-prefixed variant). */
public static final String JDBC_SUBPROTOCOL_INET_SQLSERVER7_POOLED_2 = "inetpool:jdbc:inetdae7";
/** A subprotocol used by the pooled i-net SQLServer 7a driver ("jdbc:"-prefixed variant). */
public static final String JDBC_SUBPROTOCOL_INET_SQLSERVER7A_POOLED_2 = "inetpool:jdbc:inetdae7a";
/** The subprotocol used by the i-net Sybase driver. */
public static final String JDBC_SUBPROTOCOL_INET_SYBASE = "inetsyb";
/** The subprotocol used by the pooled i-net Sybase driver. */
public static final String JDBC_SUBPROTOCOL_INET_SYBASE_POOLED_1 = "inetpool:inetsyb";
/** The subprotocol used by the pooled i-net Sybase driver ("jdbc:"-prefixed variant). */
public static final String JDBC_SUBPROTOCOL_INET_SYBASE_POOLED_2 = "inetpool:jdbc:inetsyb";
/** The subprotocol used by the JNetDirect SQLServer driver. */
public static final String JDBC_SUBPROTOCOL_JSQLCONNECT_SQLSERVER = "JSQLConnect";
/** The subprotocol used by the jTDS SQLServer driver. */
public static final String JDBC_SUBPROTOCOL_JTDS_SQLSERVER = "jtds:sqlserver";
/** The subprotocol used by the jTDS Sybase driver. */
public static final String JDBC_SUBPROTOCOL_JTDS_SYBASE = "jtds:sybase";
/** Maps the sub-protocol part of a jdbc connection url to an OJB platform name; populated in the constructor. */
private HashMap jdbcSubProtocolToPlatform = new HashMap();
/** Maps the fully qualified jdbc driver class name to an OJB platform name; populated in the constructor. */
private HashMap jdbcDriverToPlatform = new HashMap();
/**
 * Creates a new instance and populates the two lookup tables that map JDBC
 * sub-protocols and JDBC driver class names, respectively, to platform
 * (database) names.
 */
public PlatformUtils()
{
// Note that currently Sapdb and MaxDB have equal subprotocols and
// drivers so we have no means to distinguish them
// --- sub-protocol -> platform name ---
jdbcSubProtocolToPlatform.put(AxionPlatform.JDBC_SUBPROTOCOL, AxionPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(CloudscapePlatform.JDBC_SUBPROTOCOL_1, CloudscapePlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(CloudscapePlatform.JDBC_SUBPROTOCOL_2, CloudscapePlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(Db2Platform.JDBC_SUBPROTOCOL, Db2Platform.DATABASENAME);
jdbcSubProtocolToPlatform.put(Db2Platform.JDBC_SUBPROTOCOL_OS390_1, Db2Platform.DATABASENAME);
jdbcSubProtocolToPlatform.put(Db2Platform.JDBC_SUBPROTOCOL_OS390_2, Db2Platform.DATABASENAME);
jdbcSubProtocolToPlatform.put(Db2Platform.JDBC_SUBPROTOCOL_JTOPEN, Db2Platform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_DATADIRECT_DB2, Db2Platform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_INET_DB2, Db2Platform.DATABASENAME);
jdbcSubProtocolToPlatform.put(DerbyPlatform.JDBC_SUBPROTOCOL, DerbyPlatform.DATABASENAME);
// GemStone changes BEGIN
jdbcSubProtocolToPlatform.put(GemFireXDPlatform.JDBC_SUBPROTOCOL, GemFireXDPlatform.DATABASENAME);
// GemStone changes END
jdbcSubProtocolToPlatform.put(FirebirdPlatform.JDBC_SUBPROTOCOL, FirebirdPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(HsqlDbPlatform.JDBC_SUBPROTOCOL, HsqlDbPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(InterbasePlatform.JDBC_SUBPROTOCOL, InterbasePlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(SapDbPlatform.JDBC_SUBPROTOCOL, SapDbPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(MckoiPlatform.JDBC_SUBPROTOCOL, MckoiPlatform.DATABASENAME);
// MS SQLServer is reachable through many driver/sub-protocol combinations
jdbcSubProtocolToPlatform.put(MSSqlPlatform.JDBC_SUBPROTOCOL, MSSqlPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(MSSqlPlatform.JDBC_SUBPROTOCOL_NEW, MSSqlPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(MSSqlPlatform.JDBC_SUBPROTOCOL_INTERNAL, MSSqlPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_DATADIRECT_SQLSERVER, MSSqlPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_INET_SQLSERVER, MSSqlPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_INET_SQLSERVER6, MSSqlPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_INET_SQLSERVER7, MSSqlPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_INET_SQLSERVER7A, MSSqlPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_INET_SQLSERVER_POOLED_1, MSSqlPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_INET_SQLSERVER6_POOLED_1, MSSqlPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_INET_SQLSERVER7_POOLED_1, MSSqlPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_INET_SQLSERVER7A_POOLED_1, MSSqlPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_INET_SQLSERVER_POOLED_2, MSSqlPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_INET_SQLSERVER6_POOLED_2, MSSqlPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_INET_SQLSERVER7_POOLED_2, MSSqlPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_INET_SQLSERVER7A_POOLED_2, MSSqlPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_JSQLCONNECT_SQLSERVER, MSSqlPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_JTDS_SQLSERVER, MSSqlPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(MySqlPlatform.JDBC_SUBPROTOCOL, MySqlPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(Oracle8Platform.JDBC_SUBPROTOCOL_THIN, Oracle8Platform.DATABASENAME);
jdbcSubProtocolToPlatform.put(Oracle8Platform.JDBC_SUBPROTOCOL_OCI8, Oracle8Platform.DATABASENAME);
jdbcSubProtocolToPlatform.put(Oracle8Platform.JDBC_SUBPROTOCOL_THIN_OLD, Oracle8Platform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_DATADIRECT_ORACLE, Oracle8Platform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_INET_ORACLE, Oracle8Platform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PostgreSqlPlatform.JDBC_SUBPROTOCOL, PostgreSqlPlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(SybasePlatform.JDBC_SUBPROTOCOL, SybasePlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_DATADIRECT_SYBASE, SybasePlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_INET_SYBASE, SybasePlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_INET_SYBASE_POOLED_1, SybasePlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_INET_SYBASE_POOLED_2, SybasePlatform.DATABASENAME);
jdbcSubProtocolToPlatform.put(PlatformUtils.JDBC_SUBPROTOCOL_JTDS_SYBASE, SybasePlatform.DATABASENAME);
// --- driver class name -> platform name ---
jdbcDriverToPlatform.put(AxionPlatform.JDBC_DRIVER, AxionPlatform.DATABASENAME);
jdbcDriverToPlatform.put(Db2Platform.JDBC_DRIVER, Db2Platform.DATABASENAME);
jdbcDriverToPlatform.put(Db2Platform.JDBC_DRIVER_OLD1, Db2Platform.DATABASENAME);
jdbcDriverToPlatform.put(Db2Platform.JDBC_DRIVER_OLD2, Db2Platform.DATABASENAME);
jdbcDriverToPlatform.put(Db2Platform.JDBC_DRIVER_JTOPEN, Db2Platform.DATABASENAME);
jdbcDriverToPlatform.put(PlatformUtils.JDBC_DRIVER_DATADIRECT_DB2, Db2Platform.DATABASENAME);
jdbcDriverToPlatform.put(PlatformUtils.JDBC_DRIVER_INET_DB2, Db2Platform.DATABASENAME);
jdbcDriverToPlatform.put(DerbyPlatform.JDBC_DRIVER_EMBEDDED, DerbyPlatform.DATABASENAME);
jdbcDriverToPlatform.put(DerbyPlatform.JDBC_DRIVER, DerbyPlatform.DATABASENAME);
// GemStone changes BEGIN
jdbcDriverToPlatform.put(GemFireXDPlatform.JDBC_CLIENT_DRIVER, GemFireXDPlatform.DATABASENAME);
jdbcDriverToPlatform.put(GemFireXDPeerPlatform.JDBC_PEER_DRIVER, GemFireXDPeerPlatform.DATABASENAME);
// GemStone changes END
jdbcDriverToPlatform.put(FirebirdPlatform.JDBC_DRIVER, FirebirdPlatform.DATABASENAME);
jdbcDriverToPlatform.put(HsqlDbPlatform.JDBC_DRIVER, HsqlDbPlatform.DATABASENAME);
jdbcDriverToPlatform.put(InterbasePlatform.JDBC_DRIVER, InterbasePlatform.DATABASENAME);
jdbcDriverToPlatform.put(SapDbPlatform.JDBC_DRIVER, SapDbPlatform.DATABASENAME);
jdbcDriverToPlatform.put(MckoiPlatform.JDBC_DRIVER, MckoiPlatform.DATABASENAME);
jdbcDriverToPlatform.put(MSSqlPlatform.JDBC_DRIVER, MSSqlPlatform.DATABASENAME);
jdbcDriverToPlatform.put(MSSqlPlatform.JDBC_DRIVER_NEW, MSSqlPlatform.DATABASENAME);
jdbcDriverToPlatform.put(PlatformUtils.JDBC_DRIVER_DATADIRECT_SQLSERVER, MSSqlPlatform.DATABASENAME);
jdbcDriverToPlatform.put(PlatformUtils.JDBC_DRIVER_INET_SQLSERVER, MSSqlPlatform.DATABASENAME);
jdbcDriverToPlatform.put(PlatformUtils.JDBC_DRIVER_JSQLCONNECT_SQLSERVER, MSSqlPlatform.DATABASENAME);
jdbcDriverToPlatform.put(MySqlPlatform.JDBC_DRIVER, MySqlPlatform.DATABASENAME);
jdbcDriverToPlatform.put(MySqlPlatform.JDBC_DRIVER_OLD, MySqlPlatform.DATABASENAME);
jdbcDriverToPlatform.put(Oracle8Platform.JDBC_DRIVER, Oracle8Platform.DATABASENAME);
jdbcDriverToPlatform.put(Oracle8Platform.JDBC_DRIVER_OLD, Oracle8Platform.DATABASENAME);
jdbcDriverToPlatform.put(PlatformUtils.JDBC_DRIVER_DATADIRECT_ORACLE, Oracle8Platform.DATABASENAME);
jdbcDriverToPlatform.put(PlatformUtils.JDBC_DRIVER_INET_ORACLE, Oracle8Platform.DATABASENAME);
jdbcDriverToPlatform.put(PostgreSqlPlatform.JDBC_DRIVER, PostgreSqlPlatform.DATABASENAME);
jdbcDriverToPlatform.put(SybasePlatform.JDBC_DRIVER, SybasePlatform.DATABASENAME);
jdbcDriverToPlatform.put(SybasePlatform.JDBC_DRIVER_OLD, SybasePlatform.DATABASENAME);
jdbcDriverToPlatform.put(PlatformUtils.JDBC_DRIVER_DATADIRECT_SYBASE, SybasePlatform.DATABASENAME);
jdbcDriverToPlatform.put(PlatformUtils.JDBC_DRIVER_INET_SYBASE, SybasePlatform.DATABASENAME);
}
/**
 * Tries to determine the database type for the given data source. Note that this will establish
 * a connection to the database using the data source's default credentials.
 *
 * @param dataSource The data source
 * @return The database type or <code>null</code> if the database type couldn't be determined
 * @throws DatabaseOperationException If the connection could not be established or the
 *         metadata could not be read
 */
public String determineDatabaseType(DataSource dataSource) throws DatabaseOperationException
{
return determineDatabaseType(dataSource, null, null);
}
/**
 * Tries to determine the database type for the given data source. Note that this will establish
 * a connection to the database.
 *
 * @param dataSource The data source
 * @param username   The user name to use for connecting to the database, or
 *                   <code>null</code> to use the data source's default credentials
 * @param password   The password to use for connecting to the database
 * @return The database type or <code>null</code> if the database type couldn't be determined
 */
public String determineDatabaseType(DataSource dataSource, String username, String password) throws DatabaseOperationException
{
    Connection conn = null;

    try
    {
        conn = (username == null ? dataSource.getConnection()
                                 : dataSource.getConnection(username, password));

        DatabaseMetaData metaData = conn.getMetaData();

        return determineDatabaseType(metaData.getDriverName(), metaData.getURL());
    }
    catch (SQLException ex)
    {
        throw new DatabaseOperationException("Error while reading the database metadata: " + ex.getMessage(), ex);
    }
    finally
    {
        if (conn != null)
        {
            try
            {
                conn.close();
            }
            catch (SQLException ignored)
            {
                // deliberately swallowed: a close failure must not mask the
                // result (or the original exception) of the metadata read
            }
        }
    }
}
/**
 * Tries to determine the database type for the given jdbc driver and connection url.
 * A known driver class name takes precedence; otherwise the connection url is
 * matched against the known "jdbc:&lt;subprotocol&gt;:" prefixes.
 *
 * @param driverName        The fully qualified name of the JDBC driver
 * @param jdbcConnectionUrl The connection url
 * @return The database type or <code>null</code> if the database type couldn't be determined
 */
public String determineDatabaseType(String driverName, String jdbcConnectionUrl)
{
    if (jdbcDriverToPlatform.containsKey(driverName))
    {
        return (String)jdbcDriverToPlatform.get(driverName);
    }
    if (jdbcConnectionUrl == null)
    {
        return null;
    }

    Iterator entries = jdbcSubProtocolToPlatform.entrySet().iterator();

    while (entries.hasNext())
    {
        Map.Entry entry     = (Map.Entry)entries.next();
        String    urlPrefix = "jdbc:" + (String)entry.getKey() + ":";

        if (jdbcConnectionUrl.startsWith(urlPrefix))
        {
            return (String)entry.getValue();
        }
    }
    return null;
}
}
| |
package io.bazel.rulesscala.scalac;
import static java.io.File.pathSeparator;
import io.bazel.rulesscala.io_utils.StreamCopy;
import io.bazel.rulesscala.jar.JarCreator;
import io.bazel.rulesscala.worker.Worker;
import java.io.*;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.List;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import scala.tools.nsc.reporters.ConsoleReporter;
/**
 * A Bazel persistent-worker implementation that drives the Scala compiler:
 * it extracts source jars, compiles the Scala sources (Java sources are
 * handed off to Bazel's own javac action), copies resources into the class
 * directory and packages everything into the output jar via {@link JarCreator}.
 */
class ScalacWorker implements Worker.Interface {
  // Locale.ROOT keeps the lowercase conversion locale-independent; the default
  // locale (e.g. Turkish dotless-i) could otherwise break the "windows" match.
  private static final boolean isWindows =
      System.getProperty("os.name").toLowerCase(java.util.Locale.ROOT).contains("windows");

  public static void main(String[] args) throws Exception {
    Worker.workerMain(args, new ScalacWorker());
  }

  /**
   * Handles one compile request: parses options, prepares a clean per-target
   * work directory, compiles, copies resources, and builds the output jar.
   *
   * @param args the raw worker arguments, parsed by {@link CompileOptions}
   * @throws Exception on any compile, I/O or option-validation failure
   */
  @Override
  public void work(String[] args) throws Exception {
    CompileOptions ops = new CompileOptions(args);

    Path outputJar = Paths.get(ops.outputName);
    Path workdir = ensureEmptyWorkDirectory(outputJar, ops.currentTarget);
    Path classes = Files.createDirectories(workdir.resolve("classes"));
    Path sources = Files.createDirectories(workdir.resolve("sources"));

    List<File> jarFiles = extractSourceJars(ops, sources);
    List<File> scalaJarFiles = filterFilesByExtension(jarFiles, ".scala");
    List<File> javaJarFiles = filterFilesByExtension(jarFiles, ".java");

    if (!ops.expectJavaOutput && ops.javaFiles.length != 0) {
      throw new RuntimeException("Cannot have java source files when no expected java output");
    }
    if (!ops.expectJavaOutput && !javaJarFiles.isEmpty()) {
      throw new RuntimeException(
          "Found java files in source jars but expect Java output is set to false");
    }

    String[] scalaSources = collectSrcJarSources(ops.files, scalaJarFiles, javaJarFiles);
    String[] javaSources = appendToString(ops.javaFiles, javaJarFiles);
    if (scalaSources.length == 0 && javaSources.length == 0) {
      throw new RuntimeException("Must have input files from either source jars or local files.");
    }

    // Compile scala sources if available (if there are none, we will simply compile java sources).
    if (scalaSources.length > 0) {
      compileScalaSources(ops, scalaSources, classes);
    }

    // Copy plain resources, resource-jar contents, and classpath resources
    // into the classes directory so they end up inside the output jar.
    copyResources(ops.resourceSources, ops.resourceTargets, classes);
    copyResourceJars(ops.resourceJars, classes);
    copyClasspathResourcesToRoot(ops.classpathResourceFiles, classes);

    // Now build the output jar (stamped for determinism by JarCreator).
    String[] jarCreatorArgs = {
      "-m", ops.manifestPath, "-t", ops.stampLabel, outputJar.toString(), classes.toString()
    };
    JarCreator.main(jarCreatorArgs);
  }

  /**
   * Returns an empty per-target scratch directory next to the output jar,
   * deleting any leftovers from a previous (possibly aborted) compile.
   */
  private static Path ensureEmptyWorkDirectory(Path output, String label) throws IOException {
    // use the part after the last ':' of the Bazel label as the directory name
    String base = label.substring(label.lastIndexOf(':') + 1);
    Path dir = output.resolveSibling("_scalac").resolve(base);
    if (Files.exists(dir)) {
      deleteRecursively(dir);
    }
    return Files.createDirectories(dir);
  }

  /** Concatenates the local source files with the sources extracted from source jars. */
  private static String[] collectSrcJarSources(
      String[] files, List<File> scalaJarFiles, List<File> javaJarFiles) {
    String[] scalaSources = appendToString(files, scalaJarFiles);
    return appendToString(scalaSources, javaJarFiles);
  }

  /** Returns the subset of {@code files} whose path ends with {@code extension}. */
  private static List<File> filterFilesByExtension(List<File> files, String extension) {
    List<File> filtered = new ArrayList<File>();
    for (File f : files) {
      if (f.toString().endsWith(extension)) {
        filtered.add(f);
      }
    }
    return filtered;
  }

  /** The only entry types worth extracting from source jars. */
  private static final String[] sourceExtensions = {".scala", ".java"};

  /**
   * Extracts every source jar listed in the options into a numbered
   * subdirectory of {@code sources} and returns the extracted source files.
   */
  private static List<File> extractSourceJars(CompileOptions opts, Path sources)
      throws IOException {
    List<File> sourceFiles = new ArrayList<File>();

    for (int i = 0; i < opts.sourceJars.length; i++) {
      String jarPath = opts.sourceJars[i];
      if (jarPath.length() > 0) {
        // prefix with the index so identically named source jars don't collide
        String sourceJarFileName = String.format("%s_%s", i, Paths.get(jarPath).getFileName());
        Path sourceJarDestination = Files.createDirectories(sources.resolve(sourceJarFileName));
        sourceFiles.addAll(extractJar(jarPath, sourceJarDestination.toString(), sourceExtensions));
      }
    }

    return sourceFiles;
  }

  /**
   * Extracts the entries of a jar into {@code outputFolder}, keeping only
   * entries matching {@code extensions} (or all entries when {@code extensions}
   * is null), and returns the extracted files.
   *
   * <p>Fixes over the previous version: the {@link JarFile} handle is now
   * closed (it used to leak), and entry names are validated against the
   * destination directory to prevent "zip slip" path traversal (an entry like
   * {@code ../../evil} could previously escape {@code outputFolder}).
   */
  private static List<File> extractJar(String jarPath, String outputFolder, String[] extensions)
      throws IOException {
    List<File> outputPaths = new ArrayList<>();
    Path destRoot = Paths.get(outputFolder).normalize();

    try (JarFile jar = new JarFile(jarPath)) {
      Enumeration<JarEntry> e = jar.entries();
      while (e.hasMoreElements()) {
        JarEntry entry = e.nextElement();
        String entryName = entry.getName();
        // we don't bother to extract non-scala/java sources (skip manifest)
        if (extensions != null && !matchesFileExtensions(entryName, extensions)) {
          continue;
        }

        // zip-slip guard: the resolved path must stay inside the destination
        Path target = destRoot.resolve(entryName).normalize();
        if (!target.startsWith(destRoot)) {
          throw new IOException(
              "Jar entry escapes the extraction directory: " + entryName + " in " + jarPath);
        }

        File f = target.toFile();
        if (entry.isDirectory()) { // if it's a directory, create it
          f.mkdirs();
          continue;
        }
        File parent = f.getParentFile();
        if (parent != null) {
          parent.mkdirs();
        }
        outputPaths.add(f);
        try (InputStream is = jar.getInputStream(entry);
            OutputStream fos = new FileOutputStream(f)) {
          StreamCopy.copy(is, fos);
        }
      }
    }
    return outputPaths;
  }

  /** Returns true if {@code fileName} ends with any of the given extensions. */
  private static boolean matchesFileExtensions(String fileName, String[] extensions) {
    for (String e : extensions) {
      if (fileName.endsWith(e)) {
        return true;
      }
    }
    return false;
  }

  /** Encodes each Bazel label so it can be embedded in a ':'-joined plugin parameter. */
  private static String[] encodeBazelTargets(String[] targets) {
    return Arrays.stream(targets).map(ScalacWorker::encodeBazelTarget).toArray(String[]::new);
  }

  /** ':' is the plugin-parameter list separator, so labels encode it as ';'. */
  private static String encodeBazelTarget(String target) {
    return target.replace(":", ";");
  }

  /** A dependency-checking mode is enabled unless it is explicitly "off". */
  private static boolean isModeEnabled(String mode) {
    return !"off".equals(mode);
  }

  /**
   * Turns the non-empty plugin jar paths into scalac {@code -Xplugin:} flags.
   *
   * @param pluginElements plugin jar paths; empty strings are skipped
   * @return one {@code -Xplugin:<path>} argument per non-empty element
   */
  public static String[] buildPluginArgs(String[] pluginElements) {
    int numPlugins = 0;
    for (int i = 0; i < pluginElements.length; i++) {
      if (pluginElements[i].length() > 0) {
        numPlugins += 1;
      }
    }

    String[] result = new String[numPlugins];
    int idx = 0;
    for (int i = 0; i < pluginElements.length; i++) {
      if (pluginElements[i].length() > 0) {
        result[idx] = "-Xplugin:" + pluginElements[i];
        idx += 1;
      }
    }
    return result;
  }

  /**
   * Builds the {@code -P:dependency-analyzer:...} parameters for the
   * dependency-analyzer compiler plugin, or an empty array when both
   * strict-deps and unused-dependency checking are off.
   */
  private static String[] getPluginParamsFrom(CompileOptions ops) {
    List<String> pluginParams = new ArrayList<>(0);

    if (isModeEnabled(ops.strictDepsMode) || isModeEnabled(ops.unusedDependencyCheckerMode)) {
      String currentTarget = encodeBazelTarget(ops.currentTarget);

      String[] dependencyAnalyzerParams = {
        "-P:dependency-analyzer:strict-deps-mode:" + ops.strictDepsMode,
        "-P:dependency-analyzer:unused-deps-mode:" + ops.unusedDependencyCheckerMode,
        "-P:dependency-analyzer:current-target:" + currentTarget,
        "-P:dependency-analyzer:dependency-tracking-method:" + ops.dependencyTrackingMethod,
      };
      pluginParams.addAll(Arrays.asList(dependencyAnalyzerParams));

      if (ops.directJars.length > 0) {
        pluginParams.add("-P:dependency-analyzer:direct-jars:" + String.join(":", ops.directJars));
      }
      if (ops.directTargets.length > 0) {
        String[] directTargets = encodeBazelTargets(ops.directTargets);
        pluginParams.add(
            "-P:dependency-analyzer:direct-targets:" + String.join(":", directTargets));
      }
      if (ops.indirectJars.length > 0) {
        pluginParams.add(
            "-P:dependency-analyzer:indirect-jars:" + String.join(":", ops.indirectJars));
      }
      if (ops.indirectTargets.length > 0) {
        String[] indirectTargets = encodeBazelTargets(ops.indirectTargets);
        pluginParams.add(
            "-P:dependency-analyzer:indirect-targets:" + String.join(":", indirectTargets));
      }
      if (ops.unusedDepsIgnoredTargets.length > 0) {
        String[] ignoredTargets = encodeBazelTargets(ops.unusedDepsIgnoredTargets);
        pluginParams.add(
            "-P:dependency-analyzer:unused-deps-ignored-targets:"
                + String.join(":", ignoredTargets));
      }
    }

    return pluginParams.toArray(new String[pluginParams.size()]);
  }

  /**
   * Invokes scalac on the given sources, writes the stats and diagnostics
   * files, and fails the action when the reporter recorded errors.
   */
  private static void compileScalaSources(CompileOptions ops, String[] scalaSources, Path classes)
      throws IllegalAccessException, IOException {

    String[] pluginArgs = buildPluginArgs(ops.plugins);
    String[] pluginParams = getPluginParamsFrom(ops);

    String[] constParams = {
      "-classpath", String.join(pathSeparator, ops.classpath), "-d", classes.toString()
    };

    String[] compilerArgs =
        merge(ops.scalaOpts, pluginArgs, constParams, pluginParams, scalaSources);

    ReportableMainClass comp = new ReportableMainClass(ops);

    long start = System.currentTimeMillis();
    try {
      comp.process(compilerArgs);
    } catch (Throwable ex) {
      // surface type errors distinctly; they often indicate a classpath problem
      if (ex.toString().contains("scala.reflect.internal.Types$TypeError")) {
        throw new RuntimeException("Build failure with type error", ex);
      } else {
        throw ex;
      }
    }
    long stop = System.currentTimeMillis();
    if (ops.printCompileTime) {
      System.err.println("Compiler runtime: " + (stop - start) + "ms.");
    }

    try {
      String buildTime = "";
      // If enable stats file we write the volatile string component
      // otherwise empty string for better remote cache performance.
      if (ops.enableStatsFile) {
        buildTime = Long.toString(stop - start);
      }
      Files.write(Paths.get(ops.statsfile), Arrays.asList("build_time=" + buildTime));
    } catch (IOException ex) {
      throw new RuntimeException("Unable to write statsfile to " + ops.statsfile, ex);
    }

    // NOTE(review): assumes ReportableMainClass always installs a
    // ConsoleReporter (ProtoReporter being a subtype) — confirm in that class.
    ConsoleReporter reporter = (ConsoleReporter) comp.getReporter();
    if (reporter instanceof ProtoReporter) {
      ProtoReporter protoReporter = (ProtoReporter) reporter;
      protoReporter.writeTo(Paths.get(ops.diagnosticsFile));
    }

    if (reporter.hasErrors()) {
      reporter.flush();
      throw new RuntimeException("Build failed");
    }
  }

  /**
   * Removes a directory tree. On Windows, read-only files are made writable
   * first because {@link Files#delete} refuses to remove them there.
   */
  private static void deleteRecursively(Path directory) throws IOException {
    if (directory != null) {
      Files.walkFileTree(
          directory,
          new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
                throws IOException {
              if (isWindows) {
                file.toFile().setWritable(true);
              }
              Files.delete(file);
              return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult postVisitDirectory(Path dir, IOException exc)
                throws IOException {
              Files.delete(dir);
              return FileVisitResult.CONTINUE;
            }
          });
    }
  }

  /**
   * Copies each resource source file to its parallel target path under
   * {@code dest}; sources and targets must line up index by index.
   */
  private static void copyResources(String[] sources, String[] targets, Path dest)
      throws IOException {
    if (sources.length != targets.length)
      throw new RuntimeException(
          String.format(
              "mismatch in resources: sources: %s targets: %s",
              Arrays.toString(sources), Arrays.toString(targets)));

    for (int i = 0; i < sources.length; i++) {
      Path source = Paths.get(sources[i]);
      Path target = dest.resolve(targets[i]);
      target.getParent().toFile().mkdirs();
      Files.copy(source, target);
    }
  }

  /**
   * Copies classpath resource files to the root of {@code dest}, warning (but
   * not failing) on file-name collisions.
   */
  private static void copyClasspathResourcesToRoot(String[] classpathResourceFiles, Path dest)
      throws IOException {
    for (String s : classpathResourceFiles) {
      Path source = Paths.get(s);
      Path target = dest.resolve(source.getFileName());

      if (Files.exists(target)) {
        System.err.println(
            "Classpath resource file "
                + source.getFileName()
                + " has a namespace conflict with another file: "
                + target.getFileName());
      } else {
        Files.copy(source, target);
      }
    }
  }

  /** Extracts every resource jar (all entry types) directly into {@code dest}. */
  private static void copyResourceJars(String[] resourceJars, Path dest) throws IOException {
    for (String jarPath : resourceJars) {
      extractJar(jarPath, dest.toString(), null);
    }
  }

  /** Returns {@code init} followed by {@code toString()} of every element of {@code rest}. */
  private static <T> String[] appendToString(String[] init, List<T> rest) {
    String[] tmp = new String[init.length + rest.size()];
    System.arraycopy(init, 0, tmp, 0, init.length);
    int baseIdx = init.length;
    for (T t : rest) {
      tmp[baseIdx] = t.toString();
      baseIdx += 1;
    }
    return tmp;
  }

  /** Concatenates the given arrays in order into a single new array. */
  private static String[] merge(String[]... arrays) {
    int totalLength = 0;
    for (String[] arr : arrays) {
      totalLength += arr.length;
    }

    String[] result = new String[totalLength];
    int offset = 0;
    for (String[] arr : arrays) {
      System.arraycopy(arr, 0, result, offset, arr.length);
      offset += arr.length;
    }
    return result;
  }
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/**
* BundleInstanceType.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.5.1 Built on : Oct 19, 2009 (10:59:34 EDT)
*/
package com.amazon.ec2;
/**
* BundleInstanceType bean class
*/
public class BundleInstanceType
implements org.apache.axis2.databinding.ADBBean{
/* This type was generated from the piece of schema that had
name = BundleInstanceType
Namespace URI = http://ec2.amazonaws.com/doc/2009-10-31/
Namespace Prefix = ns1
*/
/**
 * Returns the namespace prefix for the given namespace URI: the fixed prefix
 * "ns1" for this bean's own schema namespace, otherwise a unique generated one.
 */
private static java.lang.String generatePrefix(java.lang.String namespace) {
    return namespace.equals("http://ec2.amazonaws.com/doc/2009-10-31/")
            ? "ns1"
            : org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}
/**
 * The id of the EC2 instance to bundle (serialized as the ns1:instanceId element).
 */
protected java.lang.String localInstanceId ;
/**
 * Returns the instance id.
 * @return java.lang.String the current value of InstanceId (may be null until set)
 */
public java.lang.String getInstanceId(){
return localInstanceId;
}
/**
 * Sets the instance id. Note: serialization rejects a null value.
 * @param param InstanceId
 */
public void setInstanceId(java.lang.String param){
this.localInstanceId=param;
}
/**
 * The storage location for the bundle (serialized as the ns1:storage element).
 */
protected com.amazon.ec2.BundleInstanceTaskStorageType localStorage ;
/**
 * Returns the storage descriptor.
 * @return com.amazon.ec2.BundleInstanceTaskStorageType the current value of Storage (may be null until set)
 */
public com.amazon.ec2.BundleInstanceTaskStorageType getStorage(){
return localStorage;
}
/**
 * Sets the storage descriptor. Note: serialization rejects a null value.
 * @param param Storage
 */
public void setStorage(com.amazon.ec2.BundleInstanceTaskStorageType param){
this.localStorage=param;
}
/**
 * Tests whether the given reader is MTOM aware, i.e. whether it reports the
 * axiom "data handlers aware" property as Boolean.TRUE. A reader that does not
 * recognize the property at all is treated as not MTOM aware.
 *
 * @return true if the reader supports MTOM
 */
public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
    try {
        Object property = reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE);
        return java.lang.Boolean.TRUE.equals(property);
    } catch (java.lang.IllegalArgumentException e) {
        // the reader rejected the property name entirely
        return false;
    }
}
/**
 * Returns an OMElement backed by this bean; actual serialization is deferred
 * until the element is consumed (pull-style databinding).
 *
 * @param parentQName the qualified name to give the resulting element
 * @param factory the OM factory used to construct the element
 * @return org.apache.axiom.om.OMElement
 */
public org.apache.axiom.om.OMElement getOMElement (
final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
org.apache.axiom.om.OMDataSource dataSource =
new org.apache.axis2.databinding.ADBDataSource(this,parentQName){
// delegates to the outer bean's serialize when the element is finally written
public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
BundleInstanceType.this.serialize(parentQName,factory,xmlWriter);
}
};
return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
parentQName,factory,dataSource);
}
/**
 * Serializes this bean to the given writer without an xsi:type attribute;
 * convenience overload delegating to the four-argument serialize.
 */
public void serialize(final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory,
org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
serialize(parentQName,factory,xmlWriter,false);
}
/**
 * Serializes this bean as an element named {@code parentQName}: writes the
 * start element (declaring the namespace if needed), optionally an xsi:type
 * attribute, then the mandatory instanceId and storage children.
 *
 * @param serializeType when true, an xsi:type="...BundleInstanceType" attribute is written
 * @throws org.apache.axis2.databinding.ADBException if instanceId or storage is null
 */
public void serialize(final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory,
org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
boolean serializeType)
throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
java.lang.String prefix = null;
java.lang.String namespace = null;
prefix = parentQName.getPrefix();
namespace = parentQName.getNamespaceURI();
// write the start element, declaring the namespace only if the writer
// does not already have a prefix bound for it
if ((namespace != null) && (namespace.trim().length() > 0)) {
java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
if (writerPrefix != null) {
xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
} else {
if (prefix == null) {
prefix = generatePrefix(namespace);
}
xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
} else {
xmlWriter.writeStartElement(parentQName.getLocalPart());
}
// optionally emit xsi:type so the receiver can identify the concrete schema type
if (serializeType){
java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://ec2.amazonaws.com/doc/2009-10-31/");
if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){
writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
namespacePrefix+":BundleInstanceType",
xmlWriter);
} else {
writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
"BundleInstanceType",
xmlWriter);
}
}
// child element: instanceId (required, non-nillable)
namespace = "http://ec2.amazonaws.com/doc/2009-10-31/";
if (! namespace.equals("")) {
prefix = xmlWriter.getPrefix(namespace);
if (prefix == null) {
prefix = generatePrefix(namespace);
xmlWriter.writeStartElement(prefix,"instanceId", namespace);
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
} else {
xmlWriter.writeStartElement(namespace,"instanceId");
}
} else {
xmlWriter.writeStartElement("instanceId");
}
if (localInstanceId==null){
// the element is not nillable, so a missing value is a hard error
throw new org.apache.axis2.databinding.ADBException("instanceId cannot be null!!");
}else{
xmlWriter.writeCharacters(localInstanceId);
}
xmlWriter.writeEndElement();
// child element: storage (required) — delegates to the nested bean
if (localStorage==null){
throw new org.apache.axis2.databinding.ADBException("storage cannot be null!!");
}
localStorage.serialize(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","storage"),
factory,xmlWriter);
xmlWriter.writeEndElement();
}
/**
 * Writes a namespaced attribute, first binding the supplied prefix on the
 * writer if the namespace is not yet associated with any prefix.
 */
private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
    boolean namespaceUnbound = (xmlWriter.getPrefix(namespace) == null);

    if (namespaceUnbound) {
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    }
    xmlWriter.writeAttribute(namespace, attName, attValue);
}
/**
 * Writes an attribute, registering a prefix for the namespace when one is
 * given; an empty namespace produces an unqualified attribute.
 */
private void writeAttribute(java.lang.String namespace,java.lang.String attName,
java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
    if (!namespace.equals(""))
    {
        registerPrefix(xmlWriter, namespace);
        xmlWriter.writeAttribute(namespace,attName,attValue);
        return;
    }
    xmlWriter.writeAttribute(attName,attValue);
}
/**
 * Writes an attribute whose value is a QName, first ensuring the QName's own
 * namespace is bound so the value can be written as "prefix:localPart".
 */
private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
java.lang.String attributeNamespace = qname.getNamespaceURI();
java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
if (attributePrefix == null) {
attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
}
java.lang.String attributeValue;
// prefix the local part only when the namespace maps to a non-default prefix
if (attributePrefix.trim().length() > 0) {
attributeValue = attributePrefix + ":" + qname.getLocalPart();
} else {
attributeValue = qname.getLocalPart();
}
// an empty attribute namespace means an unqualified attribute
if (namespace.equals("")) {
xmlWriter.writeAttribute(attName, attributeValue);
} else {
registerPrefix(xmlWriter, namespace);
xmlWriter.writeAttribute(namespace, attName, attributeValue);
}
}
/**
 * Writes a QName as element text ("prefix:localPart"), declaring the QName's
 * namespace on the writer first if it is not yet bound.
 */
private void writeQName(javax.xml.namespace.QName qname,
javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
java.lang.String namespaceURI = qname.getNamespaceURI();
if (namespaceURI != null) {
java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
if (prefix == null) {
prefix = generatePrefix(namespaceURI);
xmlWriter.writeNamespace(prefix, namespaceURI);
xmlWriter.setPrefix(prefix,namespaceURI);
}
if (prefix.trim().length() > 0){
xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
} else {
// i.e this is the default namespace
xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
}
} else {
xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
}
}
/**
 * Writes an array of QNames as space-separated element text, declaring any
 * unbound namespaces on the writer as it goes.
 */
private void writeQNames(javax.xml.namespace.QName[] qnames,
javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
if (qnames != null) {
// we have to store this data until the last moment since it is not possible to write any
// namespace data after writing the character data
java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
java.lang.String namespaceURI = null;
java.lang.String prefix = null;
for (int i = 0; i < qnames.length; i++) {
if (i > 0) {
stringToWrite.append(" ");
}
namespaceURI = qnames[i].getNamespaceURI();
if (namespaceURI != null) {
prefix = xmlWriter.getPrefix(namespaceURI);
if ((prefix == null) || (prefix.length() == 0)) {
prefix = generatePrefix(namespaceURI);
xmlWriter.writeNamespace(prefix, namespaceURI);
xmlWriter.setPrefix(prefix,namespaceURI);
}
if (prefix.trim().length() > 0){
stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
} else {
stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
}
} else {
stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
}
}
xmlWriter.writeCharacters(stringToWrite.toString());
}
}
/**
 * Ensures the given namespace is bound on the writer and returns its prefix.
 * An already-bound namespace keeps its prefix; otherwise a fresh prefix that
 * is not yet in use in the current namespace context is generated, declared
 * and registered on the writer.
 */
private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
    java.lang.String existing = xmlWriter.getPrefix(namespace);
    if (existing != null) {
        return existing;
    }

    java.lang.String candidate = generatePrefix(namespace);
    // keep trying until we find a prefix that is not already taken
    while (xmlWriter.getNamespaceContext().getNamespaceURI(candidate) != null) {
        candidate = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    xmlWriter.writeNamespace(candidate, namespace);
    xmlWriter.setPrefix(candidate, namespace);
    return candidate;
}
/**
 * Databinding method producing a pull-parser (StAX reader) view of this
 * object, exposing it under the given element name.
 *
 * @param qName the element QName to expose the object under
 * @return a pull parser over this object's element/attribute data
 * @throws org.apache.axis2.databinding.ADBException if a required property
 *         ({@code instanceId} or {@code storage}) is null
 */
public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
        throws org.apache.axis2.databinding.ADBException {
    // Typed lists replace the generated raw ArrayList; the ADB reader still
    // receives plain Object[] arrays, so the interface is unchanged.
    java.util.List<java.lang.Object> elementList = new java.util.ArrayList<java.lang.Object>();
    java.util.List<java.lang.Object> attribList = new java.util.ArrayList<java.lang.Object>();
    // Elements are stored as alternating (QName, value) pairs.
    elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/",
            "instanceId"));
    if (localInstanceId != null) {
        elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localInstanceId));
    } else {
        throw new org.apache.axis2.databinding.ADBException("instanceId cannot be null!!");
    }
    elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/",
            "storage"));
    if (localStorage == null) {
        throw new org.apache.axis2.databinding.ADBException("storage cannot be null!!");
    }
    elementList.add(localStorage);
    return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
}
/**
 * Factory class that keeps the parse method
 */
public static class Factory {
    /**
     * static method to create the object
     * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
     * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
     * Postcondition: If this object is an element, the reader is positioned at its end element
     * If this object is a complex type, the reader is positioned at the end element of its outer element
     */
    public static BundleInstanceType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception {
        BundleInstanceType object =
                new BundleInstanceType();
        int event;
        java.lang.String nillableValue = null;
        java.lang.String prefix = "";
        java.lang.String namespaceuri = "";
        try {
            // Skip any ignorable events until an element boundary.
            while (!reader.isStartElement() && !reader.isEndElement())
                reader.next();
            // Honor an explicit xsi:type attribute: delegate to the mapped
            // subtype's factory when the declared type is not this type.
            if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type") != null) {
                java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
                        "type");
                if (fullTypeName != null) {
                    java.lang.String nsPrefix = null;
                    if (fullTypeName.indexOf(":") > -1) {
                        nsPrefix = fullTypeName.substring(0, fullTypeName.indexOf(":"));
                    }
                    nsPrefix = nsPrefix == null ? "" : nsPrefix;
                    java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":") + 1);
                    if (!"BundleInstanceType".equals(type)) {
                        //find namespace for the prefix
                        java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                        return (BundleInstanceType) com.amazon.ec2.ExtensionMapper.getTypeObject(
                                nsUri, type, reader);
                    }
                }
            }
            // Note all attributes that were handled. Used to differ normal attributes
            // from anyAttributes.
            java.util.Vector handledAttributes = new java.util.Vector();
            reader.next();
            // --- <instanceId> : required simple element ---
            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/", "instanceId").equals(reader.getName())) {
                java.lang.String content = reader.getElementText();
                object.setInstanceId(
                        org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                reader.next();
            } // End of if for expected property start element
            else {
                // A start element we are not expecting indicates an invalid parameter was passed
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            }
            // --- <storage> : required complex element, parsed by its own factory ---
            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/", "storage").equals(reader.getName())) {
                object.setStorage(com.amazon.ec2.BundleInstanceTaskStorageType.Factory.parse(reader));
                reader.next();
            } // End of if for expected property start element
            else {
                // A start element we are not expecting indicates an invalid parameter was passed
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            }
            // Any further start element is an unexpected trailing property.
            while (!reader.isStartElement() && !reader.isEndElement())
                reader.next();
            if (reader.isStartElement())
                // A start element we are not expecting indicates a trailing invalid property
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
        } catch (javax.xml.stream.XMLStreamException e) {
            throw new java.lang.Exception(e);
        }
        return object;
    }
}//end of factory class
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lucene.search.function;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
/**
 * Randomized tests for {@code MinScoreScorer}, a scorer wrapper (project
 * class, not visible here) expected to filter out documents whose score falls
 * below a configured minimum.
 */
public class MinScoreScorerTests extends LuceneTestCase {

    /** Builds a forward-only iterator over the given (sorted) doc ids. */
    private static DocIdSetIterator iterator(final int... docs) {
        return new DocIdSetIterator() {
            int i = -1; // index of the current doc within docs; -1 = not started

            @Override
            public int nextDoc() throws IOException {
                if (i + 1 == docs.length) {
                    return NO_MORE_DOCS;
                } else {
                    return docs[++i];
                }
            }

            @Override
            public int docID() {
                // NOTE(review): after nextDoc() returns NO_MORE_DOCS, i still
                // equals docs.length - 1, so the i == docs.length branch is
                // unreachable and docID() keeps reporting the last doc id.
                // The test only relies on the return values of
                // nextDoc()/advance(), so this appears benign — confirm.
                return i < 0 ? -1 : i == docs.length ? NO_MORE_DOCS : docs[i];
            }

            @Override
            public long cost() {
                return docs.length;
            }

            @Override
            public int advance(int target) throws IOException {
                // Linear advance is fine for test-sized doc sets.
                return slowAdvance(target);
            }
        };
    }

    /** A do-nothing Weight; only needed as a constructor argument for scorers. */
    private static Weight fakeWeight() {
        return new Weight(new MatchAllDocsQuery()) {
            @Override
            public void extractTerms(Set<Term> terms) {
            }

            @Override
            public Explanation explain(LeafReaderContext context, int doc) throws IOException {
                return null;
            }

            @Override
            public Scorer scorer(LeafReaderContext context) throws IOException {
                return null;
            }

            @Override
            public boolean isCacheable(LeafReaderContext ctx) {
                return false;
            }
        };
    }

    /**
     * Creates a scorer matching exactly {@code docs} (sorted) with the
     * parallel {@code scores}. When {@code twoPhase} is true, the scorer
     * exposes an approximation over all ids in [0, maxDoc) plus a matches()
     * check; otherwise it iterates the doc ids directly. score() asserts it
     * is called at most once per document.
     */
    private static Scorer scorer(int maxDoc, final int[] docs, final float[] scores, final boolean twoPhase) {
        final DocIdSetIterator iterator = twoPhase ? DocIdSetIterator.all(maxDoc) : iterator(docs);
        return new Scorer(fakeWeight()) {
            int lastScoredDoc = -1; // guards against double scoring of a doc

            public DocIdSetIterator iterator() {
                if (twoPhase) {
                    return TwoPhaseIterator.asDocIdSetIterator(twoPhaseIterator());
                } else {
                    return iterator;
                }
            }

            public TwoPhaseIterator twoPhaseIterator() {
                if (twoPhase) {
                    return new TwoPhaseIterator(iterator) {
                        @Override
                        public boolean matches() throws IOException {
                            // A candidate matches iff it is one of the wanted docs.
                            return Arrays.binarySearch(docs, iterator.docID()) >= 0;
                        }

                        @Override
                        public float matchCost() {
                            return 10;
                        }
                    };
                } else {
                    return null;
                }
            }

            @Override
            public int docID() {
                return iterator.docID();
            }

            @Override
            public float score() throws IOException {
                // The wrapper must evaluate each candidate's score exactly once.
                assertNotEquals("score() called twice on doc " + docID(), lastScoredDoc, docID());
                lastScoredDoc = docID();
                final int idx = Arrays.binarySearch(docs, docID());
                return scores[idx];
            }

            @Override
            public float getMaxScore(int upTo) throws IOException {
                return Float.MAX_VALUE;
            }
        };
    }

    /**
     * Randomized check: iterating a MinScoreScorer (via random mixes of
     * nextDoc() and advance()) must yield exactly the docs whose score is
     * at least minScore, with the original scores.
     */
    public void doTestRandom(boolean twoPhase) throws IOException {
        final int maxDoc = TestUtil.nextInt(random(), 10, 10000);
        final int numDocs = TestUtil.nextInt(random(), 1, maxDoc / 2);
        final Set<Integer> uniqueDocs = new HashSet<>();
        while (uniqueDocs.size() < numDocs) {
            uniqueDocs.add(random().nextInt(maxDoc));
        }
        final int[] docs = new int[numDocs];
        int i = 0;
        for (int doc : uniqueDocs) {
            docs[i++] = doc;
        }
        Arrays.sort(docs); // binarySearch below requires sorted ids
        final float[] scores = new float[numDocs];
        for (i = 0; i < numDocs; ++i) {
            scores[i] = random().nextFloat();
        }
        Scorer scorer = scorer(maxDoc, docs, scores, twoPhase);
        final float minScore = random().nextFloat();
        Scorer minScoreScorer = new MinScoreScorer(fakeWeight(), scorer, minScore);
        int doc = -1;
        while (doc != DocIdSetIterator.NO_MORE_DOCS) {
            final int target;
            if (random().nextBoolean()) {
                target = doc + 1;
                doc = minScoreScorer.iterator().nextDoc();
            } else {
                target = doc + TestUtil.nextInt(random(), 1, 10);
                doc = minScoreScorer.iterator().advance(target);
            }
            // Expected doc: the first doc id >= target whose score clears
            // the threshold.
            int idx = Arrays.binarySearch(docs, target);
            if (idx < 0) {
                idx = -1 - idx; // insertion point of target
            }
            while (idx < docs.length && scores[idx] < minScore) {
                idx += 1;
            }
            if (idx == docs.length) {
                assertEquals(DocIdSetIterator.NO_MORE_DOCS, doc);
            } else {
                assertEquals(docs[idx], doc);
                assertEquals(scores[idx], minScoreScorer.score(), 0f);
            }
        }
    }

    public void testRegularIterator() throws IOException {
        final int iters = atLeast(5);
        for (int iter = 0; iter < iters; ++iter) {
            doTestRandom(false);
        }
    }

    public void testTwoPhaseIterator() throws IOException {
        final int iters = atLeast(5);
        for (int iter = 0; iter < iters; ++iter) {
            doTestRandom(true);
        }
    }
}
| |
package com.brainbeanapps.rosty.printseditordemo.ui.view;
import android.app.Dialog;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.drawable.GradientDrawable;
import android.graphics.drawable.LayerDrawable;
import android.os.Bundle;
import android.util.AttributeSet;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.View;
import android.view.Window;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.brainbeanapps.rosty.printseditordemo.R;
import com.brainbeanapps.rosty.printseditordemo.utile.Helper;
import com.nineoldandroids.view.ViewHelper;
/**
 * Material-design style slider (seek bar) with an optional floating number
 * indicator shown while dragging.
 * Created by https://github.com/navasmdc
 */
public class Slider extends CustomView {

    /** Accent color used for the active track, press ripple and indicator. */
    private int backgroundColor = Color.parseColor("#4CAF50");
    /** Draggable thumb. */
    private Ball ball;
    /** Scratch bitmap used to punch a transparent hole when value == min. */
    private Bitmap bitmap;
    private int max = 100;
    private int min = 0;
    /** Floating dialog showing the current value while dragging (optional). */
    private NumberIndicator numberIndicator;
    private OnValueChangedListener onValueChangedListener;
    /** True once the ball travel limits have been computed after layout. */
    private boolean placedBall = false;
    /** True while the user's finger is down on the slider. */
    private boolean press = false;
    private boolean showNumberIndicator = false;
    private int value = 0;

    public Slider(Context context, AttributeSet attrs) {
        super(context, attrs);
        setAttributes(attrs);
    }

    public int getMax() {
        return max;
    }

    public void setMax(int max) {
        this.max = max;
    }

    public int getMin() {
        return min;
    }

    public void setMin(int min) {
        this.min = min;
    }

    public OnValueChangedListener getOnValueChangedListener() {
        return onValueChangedListener;
    }

    public void setOnValueChangedListener(
            OnValueChangedListener onValueChangedListener) {
        this.onValueChangedListener = onValueChangedListener;
    }

    // GETTERS & SETTERS
    public int getValue() {
        return value;
    }

    /**
     * Programmatically sets the slider value and moves the thumb accordingly.
     * If the view has not been laid out yet, the call is re-posted until the
     * thumb has been placed.
     */
    public void setValue(final int value) {
        if (!placedBall)
            post(new Runnable() {
                @Override
                public void run() {
                    setValue(value);
                }
            });
        else {
            this.value = value;
            // FIX: scale over the configured range (max - min) and offset by
            // min, matching the math in onTouchEvent() and onDraw(). The old
            // code divided by max and ignored min, so the thumb was placed
            // wrongly whenever min != 0 or max differed from the value range.
            float division = (ball.xFin - ball.xIni) / (max - min);
            ViewHelper.setX(ball, (value - min) * division + ball.xIni);
            ball.changeBackground();
        }
    }

    @Override
    public void invalidate() {
        // Keep the thumb redrawn in sync with the track.
        ball.invalidate();
        super.invalidate();
    }

    public boolean isShowNumberIndicator() {
        return showNumberIndicator;
    }

    public void setShowNumberIndicator(boolean showNumberIndicator) {
        this.showNumberIndicator = showNumberIndicator;
        numberIndicator = (showNumberIndicator) ? new NumberIndicator(
                getContext()) : null;
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        isLastTouch = true;
        if (isEnabled()) {
            if (event.getAction() == MotionEvent.ACTION_DOWN
                    || event.getAction() == MotionEvent.ACTION_MOVE) {
                if (numberIndicator != null && !numberIndicator.isShowing())
                    numberIndicator.show();
                if ((event.getX() <= getWidth() && event.getX() >= 0)) {
                    press = true;
                    // Translate the touch x position into a value in [min, max].
                    int newValue = 0;
                    float division = (ball.xFin - ball.xIni) / (max - min);
                    if (event.getX() > ball.xFin) {
                        newValue = max;
                    } else if (event.getX() < ball.xIni) {
                        newValue = min;
                    } else {
                        newValue = min + (int) ((event.getX() - ball.xIni) / division);
                    }
                    if (value != newValue) {
                        value = newValue;
                        if (onValueChangedListener != null)
                            onValueChangedListener.onValueChanged(newValue);
                    }
                    // Move the thumb, clamped to the ends of the track.
                    float x = event.getX();
                    x = (x < ball.xIni) ? ball.xIni : x;
                    x = (x > ball.xFin) ? ball.xFin : x;
                    ViewHelper.setX(ball, x);
                    ball.changeBackground();
                    // If the slider has a number indicator, move it along.
                    if (numberIndicator != null) {
                        numberIndicator.indicator.x = x;
                        numberIndicator.indicator.finalY = Helper
                                .getRelativeTop(this) - getHeight() / 2;
                        numberIndicator.indicator.finalSize = getHeight() / 2;
                        numberIndicator.numberIndicator.setText("");
                    }
                } else {
                    // Finger left the horizontal bounds: treat as a release.
                    press = false;
                    isLastTouch = false;
                    if (numberIndicator != null)
                        numberIndicator.dismiss();
                }
            } else if (event.getAction() == MotionEvent.ACTION_UP ||
                    event.getAction() == MotionEvent.ACTION_CANCEL) {
                if (numberIndicator != null)
                    numberIndicator.dismiss();
                isLastTouch = false;
                press = false;
            }
        }
        return true;
    }

    @Override
    public void setBackgroundColor(int color) {
        backgroundColor = color;
        if (isEnabled())
            beforeBackground = backgroundColor;
    }

    /**
     * Make a dark color for the press effect: each RGB channel is reduced by
     * 30 (floored at 0) and partial alpha is applied.
     */
    protected int makePressColor() {
        int r = (this.backgroundColor >> 16) & 0xFF;
        int g = (this.backgroundColor >> 8) & 0xFF;
        int b = (this.backgroundColor >> 0) & 0xFF;
        r = (r - 30 < 0) ? 0 : r - 30;
        g = (g - 30 < 0) ? 0 : g - 30;
        b = (b - 30 < 0) ? 0 : b - 30;
        return Color.argb(70, r, g, b);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        if (!placedBall) {
            placeBall();
        }
        Paint paint = new Paint();
        if (value == min) {
            // At the minimum, draw the track to an off-screen bitmap and punch
            // a transparent hole where the thumb sits.
            if (bitmap == null) {
                bitmap = Bitmap.createBitmap(canvas.getWidth(),
                        canvas.getHeight(), Bitmap.Config.ARGB_8888);
            }
            Canvas temp = new Canvas(bitmap);
            paint.setColor(Color.parseColor("#B0B0B0"));
            paint.setStrokeWidth(Helper.dpToPx(2, getResources()));
            temp.drawLine(getHeight() / 2, getHeight() / 2, getWidth()
                    - getHeight() / 2, getHeight() / 2, paint);
            Paint transparentPaint = new Paint();
            transparentPaint.setColor(getResources().getColor(
                    android.R.color.transparent));
            transparentPaint.setXfermode(new PorterDuffXfermode(
                    PorterDuff.Mode.CLEAR));
            temp.drawCircle(ViewHelper.getX(ball) + ball.getWidth() / 2,
                    ViewHelper.getY(ball) + ball.getHeight() / 2,
                    ball.getWidth() / 2, transparentPaint);
            canvas.drawBitmap(bitmap, 0, 0, new Paint());
        } else {
            // Inactive track plus the colored active portion up to the value.
            paint.setColor(Color.parseColor("#B0B0B0"));
            paint.setStrokeWidth(Helper.dpToPx(2, getResources()));
            canvas.drawLine(getHeight() / 2, getHeight() / 2, getWidth()
                    - getHeight() / 2, getHeight() / 2, paint);
            paint.setColor(backgroundColor);
            float division = (ball.xFin - ball.xIni) / (max - min);
            int value = this.value - min;
            canvas.drawLine(getHeight() / 2, getHeight() / 2, value * division
                    + getHeight() / 2, getHeight() / 2, paint);
        }
        if (press && !showNumberIndicator) {
            // Press ripple around the thumb.
            paint.setColor(backgroundColor);
            paint.setAntiAlias(true);
            canvas.drawCircle(ViewHelper.getX(ball) + ball.getWidth() / 2,
                    getHeight() / 2, getHeight() / 3, paint);
        }
        // NOTE(review): the unconditional invalidate() keeps this view
        // redrawing continuously; this is how the upstream library drives its
        // animations, but it is wasteful — consider invalidating only while
        // pressed/animating.
        invalidate();
    }

    /** Reads XML attributes (background, min, max, value, indicator flag). */
    protected void setAttributes(AttributeSet attrs) {
        setBackgroundResource(R.drawable.bg_transparent);
        // Minimum touch-target size.
        setMinimumHeight(Helper.dpToPx(48, getResources()));
        setMinimumWidth(Helper.dpToPx(80, getResources()));
        // Background color: first try a resource reference, then a literal.
        int backgroundResId = attrs.getAttributeResourceValue(ANDROIDXML,
                "background", -1);
        if (backgroundResId != -1) {
            setBackgroundColor(getResources().getColor(backgroundResId));
        } else {
            // Color by hexadecimal
            int background = attrs.getAttributeIntValue(ANDROIDXML, "background", -1);
            if (background != -1)
                setBackgroundColor(background);
        }
        showNumberIndicator = attrs.getAttributeBooleanValue(MATERIALDESIGNXML,
                "showNumberIndicator", false);
        min = attrs.getAttributeIntValue(MATERIALDESIGNXML, "min", 0);
        max = attrs.getAttributeIntValue(MATERIALDESIGNXML, "max", 0);
        value = attrs.getAttributeIntValue(MATERIALDESIGNXML, "value", min);
        ball = new Ball(getContext());
        RelativeLayout.LayoutParams params = new LayoutParams(Helper.dpToPx(20,
                getResources()), Helper.dpToPx(20, getResources()));
        params.addRule(RelativeLayout.CENTER_VERTICAL, RelativeLayout.TRUE);
        ball.setLayoutParams(params);
        addView(ball);
        // Set if slider content number indicator
        // TODO
        if (showNumberIndicator) {
            numberIndicator = new NumberIndicator(getContext());
        }
    }

    /** Computes the thumb's travel limits once the view has been measured. */
    private void placeBall() {
        ViewHelper.setX(ball, getHeight() / 2 - ball.getWidth() / 2);
        ball.xIni = ViewHelper.getX(ball);
        ball.xFin = getWidth() - getHeight() / 2 - ball.getWidth() / 2;
        ball.xCen = getWidth() / 2 - ball.getWidth() / 2;
        placedBall = true;
    }

    // Event when slider change value
    public interface OnValueChangedListener {
        void onValueChanged(int value);
    }

    /** The draggable thumb. */
    class Ball extends View {
        // Leftmost, rightmost and center x positions of the thumb's travel.
        float xIni, xFin, xCen;

        public Ball(Context context) {
            super(context);
            setBackgroundResource(R.drawable.bg_switch_ball_uncheck);
        }

        /** Tints the thumb with the accent color when value is above min. */
        public void changeBackground() {
            if (value != min) {
                setBackgroundResource(R.drawable.bg_switch_ball_check);
                LayerDrawable layer = (LayerDrawable) getBackground();
                GradientDrawable shape = (GradientDrawable) layer
                        .findDrawableByLayerId(R.id.shape_bacground);
                shape.setColor(backgroundColor);
            } else {
                setBackgroundResource(R.drawable.bg_switch_ball_uncheck);
            }
        }
    }

    // Slider Number Indicator
    class Indicator extends RelativeLayout {
        boolean animate = true;
        // Final size after animation
        float finalSize = 0;
        // Final y position after animation
        float finalY = 0;
        boolean numberIndicatorResize = false;
        // Size of number indicator
        float size = 0;
        // Position of number indicator
        float x = 0;
        float y = 0;

        public Indicator(Context context) {
            super(context);
            setBackgroundColor(getResources().getColor(
                    android.R.color.transparent));
        }

        @Override
        protected void onDraw(Canvas canvas) {
            super.onDraw(canvas);
            if (!numberIndicatorResize) {
                RelativeLayout.LayoutParams params = (LayoutParams) numberIndicator.numberIndicator
                        .getLayoutParams();
                params.height = (int) finalSize * 2;
                params.width = (int) finalSize * 2;
                numberIndicator.numberIndicator.setLayoutParams(params);
            }
            Paint paint = new Paint();
            paint.setAntiAlias(true);
            paint.setColor(backgroundColor);
            if (animate) {
                // Grow the bubble and move it up toward its final position.
                if (y == 0)
                    y = finalY + finalSize * 2;
                y -= Helper.dpToPx(6, getResources());
                size += Helper.dpToPx(2, getResources());
            }
            canvas.drawCircle(
                    ViewHelper.getX(ball)
                            + Helper.getRelativeLeft((View) ball.getParent())
                            + ball.getWidth() / 2, y, size, paint);
            if (animate && size >= finalSize)
                animate = false;
            if (!animate) {
                // Animation finished: center the value text inside the bubble.
                ViewHelper
                        .setX(numberIndicator.numberIndicator,
                                (ViewHelper.getX(ball)
                                        + Helper.getRelativeLeft((View) ball
                                        .getParent()) + ball.getWidth() / 2)
                                        - size);
                ViewHelper.setY(numberIndicator.numberIndicator, y - size);
                numberIndicator.numberIndicator.setText(value + "");
            }
            invalidate();
        }
    }

    class NumberIndicator extends Dialog {
        Indicator indicator;
        TextView numberIndicator;

        public NumberIndicator(Context context) {
            super(context, android.R.style.Theme_Translucent);
        }

        @Override
        public void dismiss() {
            super.dismiss();
            // Reset animation state so the bubble re-animates next time.
            indicator.y = 0;
            indicator.size = 0;
            indicator.animate = true;
        }

        @Override
        public void onBackPressed() {
            // Intentionally swallow back presses while the indicator shows.
        }

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            requestWindowFeature(Window.FEATURE_NO_TITLE);
            super.onCreate(savedInstanceState);
            setContentView(R.layout.layout_number_indicator_spinner);
            setCanceledOnTouchOutside(false);
            RelativeLayout content = (RelativeLayout) this
                    .findViewById(R.id.number_indicator_spinner_content);
            indicator = new Indicator(this.getContext());
            content.addView(indicator);
            numberIndicator = new TextView(getContext());
            numberIndicator.setTextColor(Color.WHITE);
            numberIndicator.setGravity(Gravity.CENTER);
            content.addView(numberIndicator);
            indicator.setLayoutParams(new RelativeLayout.LayoutParams(
                    RelativeLayout.LayoutParams.FILL_PARENT,
                    RelativeLayout.LayoutParams.FILL_PARENT));
        }
    }
}
| |
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.appsearch.app;
import android.annotation.SuppressLint;
import android.os.Bundle;
import androidx.annotation.IntDef;
import androidx.annotation.IntRange;
import androidx.annotation.NonNull;
import androidx.annotation.RestrictTo;
import androidx.appsearch.annotation.Document;
import androidx.appsearch.exceptions.AppSearchException;
import androidx.appsearch.util.BundleUtil;
import androidx.collection.ArrayMap;
import androidx.core.util.Preconditions;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* This class represents the specification logic for AppSearch. It can be used to set the type of
* search, like prefix or exact only or apply filters to search for a specific schema type only etc.
*/
// TODO(sidchhabra) : AddResultSpec fields for Snippets etc.
public final class SearchSpec {
/**
* Schema type to be used in {@link SearchSpec.Builder#addProjection} to apply
* property paths to all results, excepting any types that have had their own, specific
* property paths set.
*/
public static final String PROJECTION_SCHEMA_TYPE_WILDCARD = "*";
static final String TERM_MATCH_TYPE_FIELD = "termMatchType";
static final String SCHEMA_FIELD = "schema";
static final String NAMESPACE_FIELD = "namespace";
static final String PACKAGE_NAME_FIELD = "packageName";
static final String NUM_PER_PAGE_FIELD = "numPerPage";
static final String RANKING_STRATEGY_FIELD = "rankingStrategy";
static final String ORDER_FIELD = "order";
static final String SNIPPET_COUNT_FIELD = "snippetCount";
static final String SNIPPET_COUNT_PER_PROPERTY_FIELD = "snippetCountPerProperty";
static final String MAX_SNIPPET_FIELD = "maxSnippet";
static final String PROJECTION_TYPE_PROPERTY_PATHS_FIELD = "projectionTypeFieldMasks";
static final String RESULT_GROUPING_TYPE_FLAGS = "resultGroupingTypeFlags";
static final String RESULT_GROUPING_LIMIT = "resultGroupingLimit";
/** @hide */
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
public static final int DEFAULT_NUM_PER_PAGE = 10;
// TODO(b/170371356): In framework, we may want these limits to be flag controlled.
// If that happens, the @IntRange() directives in this class may have to change.
private static final int MAX_NUM_PER_PAGE = 10_000;
private static final int MAX_SNIPPET_COUNT = 10_000;
private static final int MAX_SNIPPET_PER_PROPERTY_COUNT = 10_000;
private static final int MAX_SNIPPET_SIZE_LIMIT = 10_000;
/**
* Term Match Type for the query.
*
* @hide
*/
// NOTE: The integer values of these constants must match the proto enum constants in
// {@link com.google.android.icing.proto.SearchSpecProto.termMatchType}
@IntDef(value = {
TERM_MATCH_EXACT_ONLY,
TERM_MATCH_PREFIX
})
@Retention(RetentionPolicy.SOURCE)
public @interface TermMatch {
}
/**
* Query terms will only match exact tokens in the index.
* <p>Ex. A query term "foo" will only match indexed token "foo", and not "foot" or "football".
*/
public static final int TERM_MATCH_EXACT_ONLY = 1;
/**
* Query terms will match indexed tokens when the query term is a prefix of the token.
* <p>Ex. A query term "foo" will match indexed tokens like "foo", "foot", and "football".
*/
public static final int TERM_MATCH_PREFIX = 2;
/**
* Ranking Strategy for query result.
*
* @hide
*/
// NOTE: The integer values of these constants must match the proto enum constants in
// {@link ScoringSpecProto.RankingStrategy.Code}
@IntDef(value = {
RANKING_STRATEGY_NONE,
RANKING_STRATEGY_DOCUMENT_SCORE,
RANKING_STRATEGY_CREATION_TIMESTAMP,
RANKING_STRATEGY_RELEVANCE_SCORE,
RANKING_STRATEGY_USAGE_COUNT,
RANKING_STRATEGY_USAGE_LAST_USED_TIMESTAMP,
RANKING_STRATEGY_SYSTEM_USAGE_COUNT,
RANKING_STRATEGY_SYSTEM_USAGE_LAST_USED_TIMESTAMP,
})
@Retention(RetentionPolicy.SOURCE)
public @interface RankingStrategy {
}
/** No Ranking, results are returned in arbitrary order. */
public static final int RANKING_STRATEGY_NONE = 0;
/** Ranked by app-provided document scores. */
public static final int RANKING_STRATEGY_DOCUMENT_SCORE = 1;
/** Ranked by document creation timestamps. */
public static final int RANKING_STRATEGY_CREATION_TIMESTAMP = 2;
/** Ranked by document relevance score. */
public static final int RANKING_STRATEGY_RELEVANCE_SCORE = 3;
/** Ranked by number of usages, as reported by the app. */
public static final int RANKING_STRATEGY_USAGE_COUNT = 4;
/** Ranked by timestamp of last usage, as reported by the app. */
public static final int RANKING_STRATEGY_USAGE_LAST_USED_TIMESTAMP = 5;
/** Ranked by number of usages from a system UI surface. */
public static final int RANKING_STRATEGY_SYSTEM_USAGE_COUNT = 6;
/** Ranked by timestamp of last usage from a system UI surface. */
public static final int RANKING_STRATEGY_SYSTEM_USAGE_LAST_USED_TIMESTAMP = 7;
/**
* Order for query result.
*
* @hide
*/
// NOTE: The integer values of these constants must match the proto enum constants in
// {@link ScoringSpecProto.Order.Code}
@IntDef(value = {
ORDER_DESCENDING,
ORDER_ASCENDING
})
@Retention(RetentionPolicy.SOURCE)
public @interface Order {
}
/** Search results will be returned in a descending order. */
public static final int ORDER_DESCENDING = 0;
/** Search results will be returned in an ascending order. */
public static final int ORDER_ASCENDING = 1;
/**
* Grouping type for result limits.
*
* @hide
*/
@IntDef(flag = true, value = {
GROUPING_TYPE_PER_PACKAGE,
GROUPING_TYPE_PER_NAMESPACE
})
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
@Retention(RetentionPolicy.SOURCE)
public @interface GroupingType {
}
/**
* Results should be grouped together by package for the purpose of enforcing a limit on the
* number of results returned per package.
*/
public static final int GROUPING_TYPE_PER_PACKAGE = 0b01;
/**
* Results should be grouped together by namespace for the purpose of enforcing a limit on the
* number of results returned per namespace.
*/
public static final int GROUPING_TYPE_PER_NAMESPACE = 0b10;
private final Bundle mBundle;
/** @hide */
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
public SearchSpec(@NonNull Bundle bundle) {
Preconditions.checkNotNull(bundle);
mBundle = bundle;
}
/**
* Returns the {@link Bundle} populated by this builder.
*
* @hide
*/
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
@NonNull
public Bundle getBundle() {
return mBundle;
}
/** Returns how the query terms should match terms in the index. */
public @TermMatch int getTermMatch() {
return mBundle.getInt(TERM_MATCH_TYPE_FIELD, -1);
}
/**
* Returns the list of schema types to search for.
*
* <p>If empty, the query will search over all schema types.
*/
@NonNull
public List<String> getFilterSchemas() {
List<String> schemas = mBundle.getStringArrayList(SCHEMA_FIELD);
if (schemas == null) {
return Collections.emptyList();
}
return Collections.unmodifiableList(schemas);
}
/**
* Returns the list of namespaces to search over.
*
* <p>If empty, the query will search over all namespaces.
*/
@NonNull
public List<String> getFilterNamespaces() {
List<String> namespaces = mBundle.getStringArrayList(NAMESPACE_FIELD);
if (namespaces == null) {
return Collections.emptyList();
}
return Collections.unmodifiableList(namespaces);
}
/**
* Returns the list of package name filters to search over.
*
* <p>If empty, the query will search over all packages that the caller has access to. If
* package names are specified which caller doesn't have access to, then those package names
* will be ignored.
*/
@NonNull
public List<String> getFilterPackageNames() {
List<String> packageNames = mBundle.getStringArrayList(PACKAGE_NAME_FIELD);
if (packageNames == null) {
return Collections.emptyList();
}
return Collections.unmodifiableList(packageNames);
}
/** Returns the number of results per page in the result set. */
public int getResultCountPerPage() {
return mBundle.getInt(NUM_PER_PAGE_FIELD, DEFAULT_NUM_PER_PAGE);
}
/** Returns the ranking strategy. */
public @RankingStrategy int getRankingStrategy() {
return mBundle.getInt(RANKING_STRATEGY_FIELD);
}
/** Returns the order of returned search results (descending or ascending). */
public @Order int getOrder() {
return mBundle.getInt(ORDER_FIELD);
}
/** Returns how many documents to generate snippets for. */
public int getSnippetCount() {
return mBundle.getInt(SNIPPET_COUNT_FIELD);
}
/**
* Returns how many matches for each property of a matching document to generate snippets for.
*/
public int getSnippetCountPerProperty() {
return mBundle.getInt(SNIPPET_COUNT_PER_PROPERTY_FIELD);
}
/** Returns the maximum size of a snippet in characters. */
public int getMaxSnippetSize() {
return mBundle.getInt(MAX_SNIPPET_FIELD);
}
/**
* Returns a map from schema type to property paths to be used for projection.
*
* <p>If the map is empty, then all properties will be retrieved for all results.
*
* <p>Calling this function repeatedly is inefficient. Prefer to retain the Map returned
* by this function, rather than calling it multiple times.
*/
@NonNull
public Map<String, List<String>> getProjections() {
Bundle typePropertyPathsBundle = mBundle.getBundle(PROJECTION_TYPE_PROPERTY_PATHS_FIELD);
Set<String> schemas = typePropertyPathsBundle.keySet();
Map<String, List<String>> typePropertyPathsMap = new ArrayMap<>(schemas.size());
for (String schema : schemas) {
typePropertyPathsMap.put(schema, typePropertyPathsBundle.getStringArrayList(schema));
}
return typePropertyPathsMap;
}
/**
* Get the type of grouping limit to apply, or 0 if {@link Builder#setResultGrouping} was not
* called.
*/
public @GroupingType int getResultGroupingTypeFlags() {
return mBundle.getInt(RESULT_GROUPING_TYPE_FLAGS);
}
/**
* Get the maximum number of results to return for each group.
*
* @return the maximum number of results to return for each group or Integer.MAX_VALUE if
* {@link Builder#setResultGrouping(int, int)} was not called.
*/
public int getResultGroupingLimit() {
return mBundle.getInt(RESULT_GROUPING_LIMIT, Integer.MAX_VALUE);
}
/** Builder for {@link SearchSpec objects}. */
public static final class Builder {
private ArrayList<String> mSchemas = new ArrayList<>();
private ArrayList<String> mNamespaces = new ArrayList<>();
private ArrayList<String> mPackageNames = new ArrayList<>();
private Bundle mProjectionTypePropertyMasks = new Bundle();
private int mResultCountPerPage = DEFAULT_NUM_PER_PAGE;
private @TermMatch int mTermMatchType = TERM_MATCH_PREFIX;
private int mSnippetCount = 0;
private int mSnippetCountPerProperty = MAX_SNIPPET_PER_PROPERTY_COUNT;
private int mMaxSnippetSize = 0;
private @RankingStrategy int mRankingStrategy = RANKING_STRATEGY_NONE;
private @Order int mOrder = ORDER_DESCENDING;
private @GroupingType int mGroupingTypeFlags = 0;
private int mGroupingLimit = 0;
private boolean mBuilt = false;
/**
* Indicates how the query terms should match {@code TermMatchCode} in the index.
*
* <p>If this method is not called, the default term match type is
* {@link SearchSpec#TERM_MATCH_PREFIX}.
*/
@NonNull
public Builder setTermMatch(@TermMatch int termMatchType) {
Preconditions.checkArgumentInRange(termMatchType, TERM_MATCH_EXACT_ONLY,
TERM_MATCH_PREFIX, "Term match type");
resetIfBuilt();
mTermMatchType = termMatchType;
return this;
}
/**
* Adds a Schema type filter to {@link SearchSpec} Entry. Only search for documents that
* have the specified schema types.
*
* <p>If unset, the query will search over all schema types.
*/
@NonNull
public Builder addFilterSchemas(@NonNull String... schemas) {
Preconditions.checkNotNull(schemas);
resetIfBuilt();
return addFilterSchemas(Arrays.asList(schemas));
}
/**
* Adds a Schema type filter to {@link SearchSpec} Entry. Only search for documents that
* have the specified schema types.
*
* <p>If unset, the query will search over all schema types.
*/
@NonNull
public Builder addFilterSchemas(@NonNull Collection<String> schemas) {
Preconditions.checkNotNull(schemas);
resetIfBuilt();
mSchemas.addAll(schemas);
return this;
}
// @exportToFramework:startStrip()
/**
* Adds the Schema names of given document classes to the Schema type filter of
* {@link SearchSpec} Entry. Only search for documents that have the specified schema types.
*
* <p>If unset, the query will search over all schema types.
*
* @param documentClasses classes annotated with {@link Document}.
*/
// Merged list available from getFilterSchemas
@SuppressLint("MissingGetterMatchingBuilder")
@NonNull
public Builder addFilterDocumentClasses(
@NonNull Collection<? extends Class<?>> documentClasses) throws AppSearchException {
Preconditions.checkNotNull(documentClasses);
resetIfBuilt();
List<String> schemas = new ArrayList<>(documentClasses.size());
DocumentClassFactoryRegistry registry = DocumentClassFactoryRegistry.getInstance();
for (Class<?> documentClass : documentClasses) {
DocumentClassFactory<?> factory = registry.getOrCreateFactory(documentClass);
schemas.add(factory.getSchemaName());
}
addFilterSchemas(schemas);
return this;
}
// @exportToFramework:endStrip()
// @exportToFramework:startStrip()
/**
* Adds the Schema names of given document classes to the Schema type filter of
* {@link SearchSpec} Entry. Only search for documents that have the specified schema types.
*
* <p>If unset, the query will search over all schema types.
*
* @param documentClasses classes annotated with {@link Document}.
*/
// Merged list available from getFilterSchemas()
@SuppressLint("MissingGetterMatchingBuilder")
@NonNull
public Builder addFilterDocumentClasses(@NonNull Class<?>... documentClasses)
throws AppSearchException {
Preconditions.checkNotNull(documentClasses);
resetIfBuilt();
return addFilterDocumentClasses(Arrays.asList(documentClasses));
}
// @exportToFramework:endStrip()
/**
* Adds a namespace filter to {@link SearchSpec} Entry. Only search for documents that
* have the specified namespaces.
* <p>If unset, the query will search over all namespaces.
*/
@NonNull
public Builder addFilterNamespaces(@NonNull String... namespaces) {
Preconditions.checkNotNull(namespaces);
resetIfBuilt();
return addFilterNamespaces(Arrays.asList(namespaces));
}
/**
* Adds a namespace filter to {@link SearchSpec} Entry. Only search for documents that
* have the specified namespaces.
* <p>If unset, the query will search over all namespaces.
*/
@NonNull
public Builder addFilterNamespaces(@NonNull Collection<String> namespaces) {
Preconditions.checkNotNull(namespaces);
resetIfBuilt();
mNamespaces.addAll(namespaces);
return this;
}
/**
* Adds a package name filter to {@link SearchSpec} Entry. Only search for documents that
* were indexed from the specified packages.
*
* <p>If unset, the query will search over all packages that the caller has access to.
* If package names are specified which caller doesn't have access to, then those package
* names will be ignored.
*/
@NonNull
public Builder addFilterPackageNames(@NonNull String... packageNames) {
Preconditions.checkNotNull(packageNames);
resetIfBuilt();
return addFilterPackageNames(Arrays.asList(packageNames));
}
/**
* Adds a package name filter to {@link SearchSpec} Entry. Only search for documents that
* were indexed from the specified packages.
*
* <p>If unset, the query will search over all packages that the caller has access to.
* If package names are specified which caller doesn't have access to, then those package
* names will be ignored.
*/
@NonNull
public Builder addFilterPackageNames(@NonNull Collection<String> packageNames) {
Preconditions.checkNotNull(packageNames);
resetIfBuilt();
mPackageNames.addAll(packageNames);
return this;
}
/**
* Sets the number of results per page in the returned object.
*
* <p>The default number of results per page is 10.
*/
@NonNull
public SearchSpec.Builder setResultCountPerPage(
@IntRange(from = 0, to = MAX_NUM_PER_PAGE) int resultCountPerPage) {
Preconditions.checkArgumentInRange(
resultCountPerPage, 0, MAX_NUM_PER_PAGE, "resultCountPerPage");
resetIfBuilt();
mResultCountPerPage = resultCountPerPage;
return this;
}
/** Sets ranking strategy for AppSearch results. */
@NonNull
public Builder setRankingStrategy(@RankingStrategy int rankingStrategy) {
Preconditions.checkArgumentInRange(rankingStrategy, RANKING_STRATEGY_NONE,
RANKING_STRATEGY_SYSTEM_USAGE_LAST_USED_TIMESTAMP, "Result ranking strategy");
resetIfBuilt();
mRankingStrategy = rankingStrategy;
return this;
}
/**
* Indicates the order of returned search results, the default is
* {@link #ORDER_DESCENDING}, meaning that results with higher scores come first.
*
* <p>This order field will be ignored if RankingStrategy = {@code RANKING_STRATEGY_NONE}.
*/
@NonNull
public Builder setOrder(@Order int order) {
Preconditions.checkArgumentInRange(order, ORDER_DESCENDING, ORDER_ASCENDING,
"Result ranking order");
resetIfBuilt();
mOrder = order;
return this;
}
/**
* Only the first {@code snippetCount} documents based on the ranking strategy
* will have snippet information provided.
*
* <p>The list returned from {@link SearchResult#getMatchInfos} will contain at most this
* many entries.
*
* <p>If set to 0 (default), snippeting is disabled and the list returned from
* {@link SearchResult#getMatchInfos} will be empty.
*/
@NonNull
public SearchSpec.Builder setSnippetCount(
@IntRange(from = 0, to = MAX_SNIPPET_COUNT) int snippetCount) {
Preconditions.checkArgumentInRange(snippetCount, 0, MAX_SNIPPET_COUNT, "snippetCount");
resetIfBuilt();
mSnippetCount = snippetCount;
return this;
}
/**
* Sets {@code snippetCountPerProperty}. Only the first {@code snippetCountPerProperty}
* snippets for each property of each {@link GenericDocument} will contain snippet
* information.
*
* <p>If set to 0, snippeting is disabled and the list
* returned from {@link SearchResult#getMatchInfos} will be empty.
*
* <p>The default behavior is to snippet all matches a property contains, up to the maximum
* value of 10,000.
*/
@NonNull
public SearchSpec.Builder setSnippetCountPerProperty(
@IntRange(from = 0, to = MAX_SNIPPET_PER_PROPERTY_COUNT)
int snippetCountPerProperty) {
Preconditions.checkArgumentInRange(snippetCountPerProperty,
0, MAX_SNIPPET_PER_PROPERTY_COUNT, "snippetCountPerProperty");
resetIfBuilt();
mSnippetCountPerProperty = snippetCountPerProperty;
return this;
}
/**
* Sets {@code maxSnippetSize}, the maximum snippet size. Snippet windows start at
* {@code maxSnippetSize/2} bytes before the middle of the matching token and end at
* {@code maxSnippetSize/2} bytes after the middle of the matching token. It respects
* token boundaries, therefore the returned window may be smaller than requested.
*
* <p> Setting {@code maxSnippetSize} to 0 will disable windowing and an empty string will
* be returned. If matches enabled is also set to false, then snippeting is disabled.
*
* <p>Ex. {@code maxSnippetSize} = 16. "foo bar baz bat rat" with a query of "baz" will
* return a window of "bar baz bat" which is only 11 bytes long.
*/
@NonNull
public SearchSpec.Builder setMaxSnippetSize(
@IntRange(from = 0, to = MAX_SNIPPET_SIZE_LIMIT) int maxSnippetSize) {
Preconditions.checkArgumentInRange(
maxSnippetSize, 0, MAX_SNIPPET_SIZE_LIMIT, "maxSnippetSize");
resetIfBuilt();
mMaxSnippetSize = maxSnippetSize;
return this;
}
/**
* Adds property paths for the specified type to be used for projection. If property
* paths are added for a type, then only the properties referred to will be retrieved for
* results of that type. If a property path that is specified isn't present in a result,
* it will be ignored for that result. Property paths cannot be null.
*
* <p>If no property paths are added for a particular type, then all properties of
* results of that type will be retrieved.
*
* <p>If property path is added for the
* {@link SearchSpec#PROJECTION_SCHEMA_TYPE_WILDCARD}, then those property paths will
* apply to all results, excepting any types that have their own, specific property paths
* set.
*
* <p>Suppose the following document is in the index.
* <pre>{@code
* Email: Document {
* sender: Document {
* name: "Mr. Person"
* email: "mrperson123@google.com"
* }
* recipients: [
* Document {
* name: "John Doe"
* email: "johndoe123@google.com"
* }
* Document {
* name: "Jane Doe"
* email: "janedoe123@google.com"
* }
* ]
* subject: "IMPORTANT"
* body: "Limited time offer!"
* }
* }</pre>
*
* <p>Then, suppose that a query for "important" is issued with the following projection
* type property paths:
* <pre>{@code
* {schema: "Email", ["subject", "sender.name", "recipients.name"]}
* }</pre>
*
* <p>The above document will be returned as:
* <pre>{@code
* Email: Document {
* sender: Document {
* name: "Mr. Body"
* }
* recipients: [
* Document {
* name: "John Doe"
* }
* Document {
* name: "Jane Doe"
* }
* ]
* subject: "IMPORTANT"
* }
* }</pre>
*/
@NonNull
public SearchSpec.Builder addProjection(
@NonNull String schema, @NonNull Collection<String> propertyPaths) {
Preconditions.checkNotNull(schema);
Preconditions.checkNotNull(propertyPaths);
resetIfBuilt();
ArrayList<String> propertyPathsArrayList = new ArrayList<>(propertyPaths.size());
for (String propertyPath : propertyPaths) {
Preconditions.checkNotNull(propertyPath);
propertyPathsArrayList.add(propertyPath);
}
mProjectionTypePropertyMasks.putStringArrayList(schema, propertyPathsArrayList);
return this;
}
/**
* Set the maximum number of results to return for each group, where groups are defined
* by grouping type.
*
* <p>Calling this method will override any previous calls. So calling
* setResultGrouping(GROUPING_TYPE_PER_PACKAGE, 7) and then calling
* setResultGrouping(GROUPING_TYPE_PER_PACKAGE, 2) will result in only the latter, a
* limit of two results per package, being applied. Or calling setResultGrouping
* (GROUPING_TYPE_PER_PACKAGE, 1) and then calling setResultGrouping
* (GROUPING_TYPE_PER_PACKAGE | GROUPING_PER_NAMESPACE, 5) will result in five results
* per package per namespace.
*
* @param groupingTypeFlags One or more combination of grouping types.
* @param limit Number of results to return per {@code groupingTypeFlags}.
* @throws IllegalArgumentException if groupingTypeFlags is zero.
*/
// Individual parameters available from getResultGroupingTypeFlags and
// getResultGroupingLimit
@SuppressLint("MissingGetterMatchingBuilder")
@NonNull
public Builder setResultGrouping(@GroupingType int groupingTypeFlags, int limit) {
Preconditions.checkState(
groupingTypeFlags != 0, "Result grouping type cannot be zero.");
resetIfBuilt();
mGroupingTypeFlags = groupingTypeFlags;
mGroupingLimit = limit;
return this;
}
/** Constructs a new {@link SearchSpec} from the contents of this builder. */
@NonNull
public SearchSpec build() {
Bundle bundle = new Bundle();
bundle.putStringArrayList(SCHEMA_FIELD, mSchemas);
bundle.putStringArrayList(NAMESPACE_FIELD, mNamespaces);
bundle.putStringArrayList(PACKAGE_NAME_FIELD, mPackageNames);
bundle.putBundle(PROJECTION_TYPE_PROPERTY_PATHS_FIELD, mProjectionTypePropertyMasks);
bundle.putInt(NUM_PER_PAGE_FIELD, mResultCountPerPage);
bundle.putInt(TERM_MATCH_TYPE_FIELD, mTermMatchType);
bundle.putInt(SNIPPET_COUNT_FIELD, mSnippetCount);
bundle.putInt(SNIPPET_COUNT_PER_PROPERTY_FIELD, mSnippetCountPerProperty);
bundle.putInt(MAX_SNIPPET_FIELD, mMaxSnippetSize);
bundle.putInt(RANKING_STRATEGY_FIELD, mRankingStrategy);
bundle.putInt(ORDER_FIELD, mOrder);
bundle.putInt(RESULT_GROUPING_TYPE_FLAGS, mGroupingTypeFlags);
bundle.putInt(RESULT_GROUPING_LIMIT, mGroupingLimit);
mBuilt = true;
return new SearchSpec(bundle);
}
private void resetIfBuilt() {
if (mBuilt) {
mSchemas = new ArrayList<>(mSchemas);
mNamespaces = new ArrayList<>(mNamespaces);
mPackageNames = new ArrayList<>(mPackageNames);
mProjectionTypePropertyMasks = BundleUtil.deepCopy(mProjectionTypePropertyMasks);
mBuilt = false;
}
}
}
}
| |
/* Copyright (c) 2014-2016 Boundless and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Distribution License v1.0
* which accompanies this distribution, and is available at
* https://www.eclipse.org/org/documents/edl-v10.html
*
* Contributors:
* Gabriel Roldan (Boundless) - initial implementation
*/
package org.locationtech.geogig.storage.datastream;
import static com.google.common.base.Preconditions.checkState;
import static java.lang.Integer.toBinaryString;
import static org.locationtech.geogig.storage.datastream.Varint.readSignedVarLong;
import static org.locationtech.geogig.storage.datastream.Varint.readUnsignedVarInt;
import static org.locationtech.geogig.storage.datastream.Varint.readUnsignedVarLong;
import static org.locationtech.geogig.storage.datastream.Varint.writeSignedVarLong;
import static org.locationtech.geogig.storage.datastream.Varint.writeUnsignedVarInt;
import static org.locationtech.geogig.storage.datastream.Varint.writeUnsignedVarLong;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import org.eclipse.jdt.annotation.Nullable;
import org.geotools.feature.NameImpl;
import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
import org.geotools.feature.type.BasicFeatureTypes;
import org.geotools.referencing.CRS;
import org.geotools.referencing.CRS.AxisOrder;
import org.geotools.referencing.wkt.Formattable;
import org.locationtech.geogig.model.Bucket;
import org.locationtech.geogig.model.FieldType;
import org.locationtech.geogig.model.Node;
import org.locationtech.geogig.model.ObjectId;
import org.locationtech.geogig.model.RevCommit;
import org.locationtech.geogig.model.RevFeature;
import org.locationtech.geogig.model.RevFeatureType;
import org.locationtech.geogig.model.RevObject;
import org.locationtech.geogig.model.RevObject.TYPE;
import org.locationtech.geogig.model.impl.CommitBuilder;
import org.locationtech.geogig.model.impl.RevFeatureBuilder;
import org.locationtech.geogig.model.impl.RevFeatureTypeBuilder;
import org.locationtech.geogig.model.impl.RevPersonBuilder;
import org.locationtech.geogig.model.impl.RevTagBuilder;
import org.locationtech.geogig.model.impl.RevTreeBuilder;
import org.locationtech.geogig.model.RevPerson;
import org.locationtech.geogig.model.RevTag;
import org.locationtech.geogig.model.RevTree;
import org.locationtech.geogig.plumbing.HashObject;
import org.locationtech.geogig.repository.DiffEntry;
import org.locationtech.geogig.repository.NodeRef;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.feature.type.AttributeDescriptor;
import org.opengis.feature.type.AttributeType;
import org.opengis.feature.type.FeatureTypeFactory;
import org.opengis.feature.type.GeometryType;
import org.opengis.feature.type.Name;
import org.opengis.feature.type.PropertyDescriptor;
import org.opengis.feature.type.PropertyType;
import org.opengis.filter.Filter;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.NoSuchAuthorityCodeException;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableList.Builder;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.math.DoubleMath;
import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.Geometry;
public class FormatCommonV2 {
    // Marker byte (0x00); presumably used by callers as a string terminator with
    // readToMarker — no use is visible in this chunk, confirm against callers.
    public final static byte NUL = 0x00;
    // Shared singleton instance; no mutable instance state is visible here.
    public static final FormatCommonV2 INSTANCE = new FormatCommonV2();
public final String readToMarker(DataInput in, byte marker) throws IOException {
StringBuilder buff = new StringBuilder();
byte b = in.readByte();
while (b != marker) {
buff.append((char) b);
b = in.readByte();
}
return buff.toString();
}
public final ObjectId readObjectId(DataInput in) throws IOException {
byte[] bytes = new byte[ObjectId.NUM_BYTES];
in.readFully(bytes);
return ObjectId.createNoClone(bytes);
}
    /**
     * Constant for reading TREE objects. Indicates that the end of the tree object has been
     * reached.
     */
    public static final byte NO_MORE_NODES = 0x00;
    /**
     * Constant for reading TREE objects. Indicates that the next entry is a subtree node or a
     * features node.
     */
    public static final byte NODE = 0x01;
    /**
     * Constant for reading TREE objects. Indicates that the next entry is a bucket.
     */
    public static final byte BUCKET = 0x02;
    /**
     * The featuretype factory to use when calling code does not provide one.
     */
    // Obtained from a throw-away SimpleFeatureTypeBuilder, which carries GeoTools'
    // default factory.
    private static final FeatureTypeFactory DEFAULT_FEATURETYPE_FACTORY = new SimpleFeatureTypeBuilder()
            .getFeatureTypeFactory();
public RevTag readTag(@Nullable ObjectId id, DataInput in) throws IOException {
final ObjectId commitId = readObjectId(in);
final String name = in.readUTF();
final String message = in.readUTF();
final RevPerson tagger = readRevPerson(in);
RevTag tag;
if (id == null) {
tag = RevTagBuilder.build(ObjectId.NULL, name, commitId, message, tagger);
id = new HashObject().setObject(tag).call();
}
tag = RevTagBuilder.build(id, name, commitId, message, tagger);
return tag;
}
    /** Serializes a TAG: commit id, name, message, tagger — mirrored by readTag. */
    public void writeTag(RevTag tag, DataOutput out) throws IOException {
        out.write(tag.getCommitId().getRawValue());
        out.writeUTF(tag.getName());
        out.writeUTF(tag.getMessage());
        writePerson(tag.getTagger(), out);
    }
public void writeCommit(RevCommit commit, DataOutput data) throws IOException {
data.write(commit.getTreeId().getRawValue());
final int nParents = commit.getParentIds().size();
writeUnsignedVarInt(nParents, data);
for (ObjectId pId : commit.getParentIds()) {
data.write(pId.getRawValue());
}
writePerson(commit.getAuthor(), data);
writePerson(commit.getCommitter(), data);
data.writeUTF(commit.getMessage());
}
public RevCommit readCommit(@Nullable ObjectId id, DataInput in) throws IOException {
final ObjectId treeId = readObjectId(in);
final int nParents = readUnsignedVarInt(in);
final Builder<ObjectId> parentListBuilder = ImmutableList.builder();
for (int i = 0; i < nParents; i++) {
ObjectId parentId = readObjectId(in);
parentListBuilder.add(parentId);
}
final RevPerson author = readRevPerson(in);
final RevPerson committer = readRevPerson(in);
final String message = in.readUTF();
ObjectId commitId = id;
if (id == null) {
commitId = ObjectId.NULL;
}
RevCommit commit = CommitBuilder.build(commitId, treeId, parentListBuilder.build(), author,
committer, message);
if (id == null) {
commitId = new HashObject().setObject(commit).call();
commit = CommitBuilder.build(commitId, treeId, parentListBuilder.build(), author,
committer, message);
}
return commit;
}
public final RevPerson readRevPerson(DataInput in) throws IOException {
final String name = in.readUTF();
final String email = in.readUTF();
final long timestamp = readUnsignedVarLong(in);
final int tzOffset = readUnsignedVarInt(in);
return RevPersonBuilder.build(name.length() == 0 ? null : name,
email.length() == 0 ? null : email, timestamp, tzOffset);
}
    /**
     * Serializes a person record; absent name/email are written as empty strings
     * (readRevPerson maps them back to null).
     */
    public final void writePerson(RevPerson person, DataOutput data) throws IOException {
        data.writeUTF(person.getName().or(""));
        data.writeUTF(person.getEmail().or(""));
        writeUnsignedVarLong(person.getTimestamp(), data);
        writeUnsignedVarInt(person.getTimeZoneOffset(), data);
    }
public void writeTree(RevTree tree, DataOutput data) throws IOException {
writeUnsignedVarLong(tree.size(), data);
writeUnsignedVarInt(tree.numTrees(), data);
Envelope envBuff = new Envelope();
final int nFeatures = tree.features().size();
writeUnsignedVarInt(nFeatures, data);
for (Node feature : tree.features()) {
writeNode(feature, data, envBuff);
}
final int nTrees = tree.trees().size();
writeUnsignedVarInt(nTrees, data);
for (Node subTree : tree.trees()) {
writeNode(subTree, data, envBuff);
}
ImmutableSortedMap<Integer, Bucket> buckets = tree.buckets();
final int nBuckets = buckets.size();
writeUnsignedVarInt(nBuckets, data);
for (Map.Entry<Integer, Bucket> bucket : buckets.entrySet()) {
writeBucket(bucket.getKey(), bucket.getValue(), data, envBuff);
}
}
    /**
     * Reads a TREE object, validating that feature/subtree node lists contain
     * only nodes of the expected type and that bucket indices are unique. When
     * {@code id} is null the tree's id is recomputed by hashing its contents.
     */
    public RevTree readTree(@Nullable ObjectId id, DataInput in) throws IOException {
        final long size = readUnsignedVarLong(in);
        final int treeCount = readUnsignedVarInt(in);
        final ImmutableList.Builder<Node> featuresBuilder = new ImmutableList.Builder<Node>();
        final ImmutableList.Builder<Node> treesBuilder = new ImmutableList.Builder<Node>();
        final int nFeatures = readUnsignedVarInt(in);
        for (int i = 0; i < nFeatures; i++) {
            Node n = readNode(in);
            checkState(RevObject.TYPE.FEATURE.equals(n.getType()),
                    "Non-feature node in tree's feature list.");
            featuresBuilder.add(n);
        }
        final int nTrees = readUnsignedVarInt(in);
        for (int i = 0; i < nTrees; i++) {
            Node n = readNode(in);
            checkState(RevObject.TYPE.TREE.equals(n.getType()),
                    "Non-tree node in tree's subtree list %s->%s.", n.getType(), n);
            treesBuilder.add(n);
        }
        final int nBuckets = readUnsignedVarInt(in);
        final SortedMap<Integer, Bucket> buckets;
        // Mutable TreeMap only when there is something to put; otherwise the
        // shared empty immutable sorted map.
        buckets = nBuckets > 0 ? new TreeMap<>() : ImmutableSortedMap.of();
        for (int i = 0; i < nBuckets; i++) {
            int bucketIndex = readUnsignedVarInt(in);
            {
                Integer idx = Integer.valueOf(bucketIndex);
                checkState(!buckets.containsKey(idx), "duplicate bucket index: %s", idx);
                // checkState(bucketIndex < RevTree.MAX_BUCKETS, "Illegal bucket index: %s", idx);
            }
            Bucket bucket = readBucketBody(in);
            buckets.put(Integer.valueOf(bucketIndex), bucket);
        }
        checkState(nBuckets == buckets.size(), "expected %s buckets, got %s", nBuckets,
                buckets.size());
        ImmutableList<Node> trees = treesBuilder.build();
        ImmutableList<Node> features = featuresBuilder.build();
        if (id == null) {
            // No id supplied: recompute it from the fully parsed contents.
            id = HashObject.hashTree(trees, features, ImmutableSortedMap.copyOf(buckets));
        }
        RevTree tree = RevTreeBuilder.create(id, size, treeCount, trees, features, buckets);
        return tree;
    }
public DiffEntry readDiff(DataInput in) throws IOException {
boolean oldNode = in.readBoolean();
NodeRef oldNodeRef = null;
if (oldNode) {
oldNodeRef = readNodeRef(in);
}
boolean newNode = in.readBoolean();
NodeRef newNodeRef = null;
if (newNode) {
newNodeRef = readNodeRef(in);
}
return new DiffEntry(oldNodeRef, newNodeRef);
}
public NodeRef readNodeRef(DataInput in) throws IOException {
Node node = readNode(in);
final ObjectId metadataId = readObjectId(in);
String parentPath = in.readUTF();
return new NodeRef(node, parentPath, metadataId);
}
public void writeFeature(RevFeature feature, DataOutput data) throws IOException {
writeUnsignedVarInt(feature.size(), data);
for (int i = 0; i < feature.size(); i++) {
Optional<Object> field = feature.get(i);
FieldType type = FieldType.forValue(field);
data.writeByte(type.getTag());
if (type != FieldType.NULL) {
DataStreamValueSerializerV2.write(field, data);
}
}
}
public RevFeature readFeature(@Nullable ObjectId id, DataInput in) throws IOException {
final int count = readUnsignedVarInt(in);
final RevFeatureBuilder builder = RevFeatureBuilder.builder();
for (int i = 0; i < count; i++) {
final byte fieldTag = in.readByte();
final FieldType fieldType = FieldType.valueOf(fieldTag);
Object value = DataStreamValueSerializerV2.read(fieldType, in);
builder.addValueNoCopy(value);
}
RevFeature built = id == null ? builder.build() : builder.build(id);
return built;
}
    /** Writes the one-byte object type header (see readHeader/requireHeader). */
    public void writeHeader(DataOutput data, RevObject.TYPE header) throws IOException {
        data.writeByte(header.value());
    }
public TYPE readHeader(DataInput in) throws IOException {
final int header = in.readByte() & 0xFF;
checkState(header > -1 && header < 6,
"Illegal RevObject type header: %s, must be between 0 and 4 inclusive",
Integer.valueOf(header));
final RevObject.TYPE type = TYPE.valueOf(header);
return type;
}
public final void requireHeader(DataInput in, RevObject.TYPE header) throws IOException {
int s = in.readByte() & 0xFF;
if (header.value() != s) {
throw new IllegalArgumentException(String.format(
"Expected header %s(%d), but actually got %d", header, header.value(), s));
}
}
private static void writeBoundingBox(double minx, double maxx, double miny, double maxy,
DataOutput data) throws IOException {
long x1 = toFixedPrecision(minx, RoundingMode.HALF_DOWN);
long y1 = toFixedPrecision(miny, RoundingMode.HALF_DOWN);
long x2 = toFixedPrecision(maxx, RoundingMode.HALF_UP);
long y2 = toFixedPrecision(maxy, RoundingMode.HALF_UP);
writeSignedVarLong(x1, data);
writeSignedVarLong(y1, data);
writeSignedVarLong(x2, data);
writeSignedVarLong(y2, data);
}
private static Envelope readBoundingBox(DataInput in) throws IOException {
final long x1 = readSignedVarLong(in);
final long y1 = readSignedVarLong(in);
final long x2 = readSignedVarLong(in);
final long y2 = readSignedVarLong(in);
final double minx = toDoublePrecision(x1);
final double maxx = toDoublePrecision(x2);
final double miny = toDoublePrecision(y1);
final double maxy = toDoublePrecision(y2);
return new Envelope(minx, maxx, miny, maxy);
}
public void writePointBoundingBox(double x, double y, DataOutput data) throws IOException {
long x1 = toFixedPrecision(x);
long y1 = toFixedPrecision(y);
writeSignedVarLong(x1, data);
writeSignedVarLong(y1, data);
}
public Envelope readPointBoundingBox(DataInput in) throws IOException {
final long x1 = readSignedVarLong(in);
final long y1 = readSignedVarLong(in);
final double x = toDoublePrecision(x1);
final double y = toDoublePrecision(y1);
return new Envelope(x, x, y, y);
}
private static final double FIXED_PRECISION_FACTOR = 10_000_000D;
/**
* Converts the requested coordinate from double to fixed precision.
*/
private static long toFixedPrecision(double ordinate) {
long fixedPrecisionOrdinate = Math.round(ordinate * FIXED_PRECISION_FACTOR);
return fixedPrecisionOrdinate;
}
private static long toFixedPrecision(double ordinate, RoundingMode mode) {
long fixedPrecisionOrdinate = DoubleMath.roundToLong(ordinate * FIXED_PRECISION_FACTOR,
mode);
return fixedPrecisionOrdinate;
}
/**
* Converts the requested coordinate from fixed to double precision.
*/
private static double toDoublePrecision(long fixedPrecisionOrdinate) {
double ordinate = (double) fixedPrecisionOrdinate / FIXED_PRECISION_FACTOR;
return ordinate;
}
    /**
     * Serializes a bucket entry: index, object id, then a one-byte bounds mask
     * followed by the bounds encoding that mask selects.
     */
    public void writeBucket(final int index, final Bucket bucket, DataOutput data, Envelope envBuff)
            throws IOException {
        writeUnsignedVarInt(index, data);
        data.write(bucket.getObjectId().getRawValue());
        // Reuse the caller-supplied envelope as a scratch buffer for the bounds.
        envBuff.setToNull();
        bucket.expand(envBuff);
        if (envBuff.isNull()) {
            data.writeByte(BOUNDS_NULL_MASK);
        } else if (envBuff.getWidth() == 0D && envBuff.getHeight() == 0D) {
            // Degenerate envelope: encode compactly as a single point.
            data.writeByte(BOUNDS_POINT_MASK);
            writePointBoundingBox(envBuff.getMinX(), envBuff.getMinY(), data);
        } else {
            data.writeByte(BOUNDS_BOX2D_MASK);
            writeBoundingBox(envBuff.getMinX(), envBuff.getMaxX(), envBuff.getMinY(),
                    envBuff.getMaxY(), data);
        }
    }
/**
* Reads a bucket body (i.e assumes the head unsigned int "index" has been read already)
*/
private final Bucket readBucketBody(DataInput in) throws IOException {
ObjectId objectId = readObjectId(in);
final int boundsMask = in.readByte() & 0xFF;
@Nullable
final Envelope bounds;
if (BOUNDS_POINT_MASK == boundsMask) {
bounds = readPointBoundingBox(in);
} else if (BOUNDS_BOX2D_MASK == boundsMask) {
bounds = readBoundingBox(in);
} else {
bounds = null;
}
return Bucket.create(objectId, bounds);
}
    /** Convenience overload that allocates a fresh scratch envelope per call. */
    public void writeNode(Node node, DataOutput data) throws IOException {
        writeNode(node, data, new Envelope());
    }
    // Bit masks for the node header byte, which packs the 3-bit node type with
    // bounds/metadata/extra-data presence flags (see writeNode(Node, DataOutput,
    // Envelope) for the layout).
    static final int BOUNDS_NULL_MASK = 0b00000; // bits 4-5: no bounds stored
    static final int BOUNDS_POINT_MASK = 0b01000; // bits 4-5: point bounds (x, y only)
    static final int BOUNDS_BOX2D_MASK = 0b10000; // bits 4-5: full 2D box bounds
    static final int METADATA_PRESENT_MASK = 0b100000; // bit 6 set: metadata id follows
    static final int METADATA_ABSENT_MASK = 0b000000; // bit 6 clear: no metadata id
    static final int METADATA_READ_MASK = 0b100000; // extracts bit 6 on read
    static final int EXTRA_DATA_PRESENT_MASK = 0b1000000; // bit 7 set: extra-data map follows
    static final int EXTRA_DATA_ABSENT_MASK = 0b0000000; // bit 7 clear: no extra data
    static final int EXTRA_DATA_READ_MASK = 0b1000000; // extracts bit 7 on read
    static final int BOUNDS_READ_MASK = 0b011000; // extracts bits 4-5 on read
    static final int TYPE_READ_MASK = 0b000111; // extracts the type in bits 1-3 on read
    /**
     * Serializes a node: packed header byte, name, object id, optional metadata
     * id, optional bounds, optional extra-data map — mirrored by readNode. The
     * caller-provided envelope is used as a scratch buffer for the bounds.
     */
    public void writeNode(Node node, DataOutput data, Envelope env) throws IOException {
        // Encode the node type and the bounds and metadata presence masks in one single byte:
        // - bits 1-3 for the object type (up to 8 types, there are only 5 and no plans to add more)
        // - bits 4-5 bits for the bounds mask
        // - bit 6 metadata id present(1) or absent(0)
        // - bit 7 extra data present(1) or absent(0)
        // - bit 8 unused
        final int nodeType = node.getType().value();
        final int boundsMask;
        final int metadataMask;
        final int extraDataMask;
        env.setToNull();
        node.expand(env);
        if (env.isNull()) {
            boundsMask = BOUNDS_NULL_MASK;
        } else if (env.getWidth() == 0D && env.getHeight() == 0D) {
            // Degenerate envelope: encode compactly as a single point.
            boundsMask = BOUNDS_POINT_MASK;
        } else {
            boundsMask = BOUNDS_BOX2D_MASK;
        }
        @Nullable
        final Map<String, Object> extraData = node.getExtraData();
        metadataMask = node.getMetadataId().isPresent() ? METADATA_PRESENT_MASK
                : METADATA_ABSENT_MASK;
        extraDataMask = extraData == null || extraData.isEmpty() ? EXTRA_DATA_ABSENT_MASK
                : EXTRA_DATA_PRESENT_MASK;
        // encode type and bounds mask together
        final int typeAndMasks = nodeType | boundsMask | metadataMask | extraDataMask;
        data.writeByte(typeAndMasks);
        data.writeUTF(node.getName());
        data.write(node.getObjectId().getRawValue());
        if (metadataMask == METADATA_PRESENT_MASK) {
            data.write(node.getMetadataId().or(ObjectId.NULL).getRawValue());
        }
        if (BOUNDS_BOX2D_MASK == boundsMask) {
            writeBoundingBox(env.getMinX(), env.getMaxX(), env.getMinY(), env.getMaxY(), data);
        } else if (BOUNDS_POINT_MASK == boundsMask) {
            writePointBoundingBox(env.getMinX(), env.getMinY(), data);
        }
        if (extraDataMask == EXTRA_DATA_PRESENT_MASK) {
            DataStreamValueSerializerV2.write(extraData, data);
        }
    }
    /**
     * Reads a node written by {@link #writeNode(Node, DataOutput, Envelope)}:
     * unpacks the header byte into type/bounds/metadata/extra-data flags, then
     * reads the fields those flags announce.
     */
    @SuppressWarnings("unchecked")
    public Node readNode(DataInput in) throws IOException {
        final int typeAndMasks = in.readByte() & 0xFF;
        // Split the packed header byte into its four components.
        final int nodeType = typeAndMasks & TYPE_READ_MASK;
        final int boundsMask = typeAndMasks & BOUNDS_READ_MASK;
        final int metadataMask = typeAndMasks & METADATA_READ_MASK;
        final int extraDataMask = typeAndMasks & EXTRA_DATA_READ_MASK;
        final RevObject.TYPE contentType = RevObject.TYPE.valueOf(nodeType);
        final String name = in.readUTF();
        final ObjectId objectId = readObjectId(in);
        ObjectId metadataId = ObjectId.NULL;
        if (metadataMask == METADATA_PRESENT_MASK) {
            metadataId = readObjectId(in);
        }
        @Nullable
        final Envelope bbox;
        if (boundsMask == BOUNDS_NULL_MASK) {
            bbox = null;
        } else if (boundsMask == BOUNDS_POINT_MASK) {
            bbox = readPointBoundingBox(in);
        } else if (boundsMask == BOUNDS_BOX2D_MASK) {
            bbox = readBoundingBox(in);
        } else {
            // BOUNDS_READ_MASK covers two bits, so a fourth combination (0b11000)
            // is representable but never written; treat it as corruption.
            throw new IllegalStateException(
                    String.format("Illegal bounds mask: %s, expected one of %s, %s, %s",
                            toBinaryString(boundsMask), toBinaryString(BOUNDS_NULL_MASK),
                            toBinaryString(BOUNDS_POINT_MASK), toBinaryString(BOUNDS_BOX2D_MASK)));
        }
        Map<String, Object> extraData = null;
        if (extraDataMask == EXTRA_DATA_PRESENT_MASK) {
            Object extra = DataStreamValueSerializerV2.read(FieldType.MAP, in);
            Preconditions.checkState(extra instanceof Map);
            extraData = (Map<String, Object>) extra;
        }
        final Node node;
        node = Node.create(name, objectId, metadataId, contentType, bbox, extraData);
        return node;
    }
/**
 * Writes a diff entry as two (presence flag, node ref) pairs: first the old
 * side, then the new side. A {@code false} flag means that side is absent.
 */
public void writeDiff(DiffEntry diff, DataOutput data) throws IOException {
    final NodeRef oldRef = diff.getOldObject();
    final boolean hasOld = oldRef != null;
    data.writeBoolean(hasOld);
    if (hasOld) {
        writeNodeRef(oldRef, data);
    }
    final NodeRef newRef = diff.getNewObject();
    final boolean hasNew = newRef != null;
    data.writeBoolean(hasNew);
    if (hasNew) {
        writeNodeRef(newRef, data);
    }
}
/**
 * Writes a node ref as its node, then the ref's raw metadata id bytes, then
 * the UTF-encoded parent path. Field order is part of the wire format.
 */
public void writeNodeRef(NodeRef nodeRef, DataOutput data) throws IOException {
    writeNode(nodeRef.getNode(), data);
    data.write(nodeRef.getMetadataId().getRawValue());
    data.writeUTF(nodeRef.getParentPath());
}
/**
 * Writes a feature type as its qualified name, the descriptor count, and
 * each property descriptor in turn.
 * <p>
 * Fix: iterate the same {@code descriptors} list whose size is written, so
 * the encoded count always matches the number of entries actually written.
 * The original iterated {@code object.type().getDescriptors()}, a different
 * collection that is not guaranteed to have the same size or order as
 * {@code object.descriptors()}.
 *
 * @param object the feature type to serialize
 * @param data the stream to write to
 * @throws IOException if writing to the stream fails
 */
public void writeFeatureType(RevFeatureType object, DataOutput data) throws IOException {
    writeName(object.getName(), data);
    ImmutableList<PropertyDescriptor> descriptors = object.descriptors();
    writeUnsignedVarInt(descriptors.size(), data);
    for (PropertyDescriptor desc : descriptors) {
        writeProperty(desc, data);
    }
}
/**
 * Reads a feature type using the default feature type factory.
 *
 * @param id the object id to assign, may be null
 * @param in stream positioned at an encoded feature type
 * @throws IOException if reading from the stream fails
 */
public RevFeatureType readFeatureType(@Nullable ObjectId id, DataInput in) throws IOException {
    return readFeatureType(id, in, DEFAULT_FEATURETYPE_FACTORY);
}
/**
 * Reads a feature type written by {@code writeFeatureType}: a qualified
 * name, a descriptor count, and that many attribute descriptors, assembled
 * into a simple feature type via the given factory.
 *
 * @param id the object id to assign, may be null
 * @param in stream positioned at an encoded feature type
 * @param typeFactory factory used to build the attribute and feature types
 * @throws IOException if reading from the stream fails
 */
public RevFeatureType readFeatureType(@Nullable ObjectId id, DataInput in,
        FeatureTypeFactory typeFactory) throws IOException {
    final Name typeName = readName(in);
    final int count = readUnsignedVarInt(in);
    final List<AttributeDescriptor> descriptors = new ArrayList<AttributeDescriptor>(count);
    for (int i = 0; i < count; i++) {
        descriptors.add(readAttributeDescriptor(in, typeFactory));
    }
    SimpleFeatureType featureType = typeFactory.createSimpleFeatureType(typeName, descriptors,
            null, false, Collections.<Filter> emptyList(), BasicFeatureTypes.FEATURE, null);
    return RevFeatureTypeBuilder.build(id, featureType);
}
/**
 * Reads a qualified name as two UTF strings (namespace, then local part),
 * mapping an empty string back to null for either component.
 */
private static Name readName(DataInput in) throws IOException {
    final String namespace = in.readUTF();
    final String localPart = in.readUTF();
    return new NameImpl(namespace.isEmpty() ? null : namespace,
            localPart.isEmpty() ? null : localPart);
}
/**
 * Reads an attribute type: a name and a field-type tag; geometry bindings
 * additionally carry a CRS, encoded either as an authority code or as a raw
 * WKT string (see {@code writePropertyType} for the writer side).
 *
 * @throws RuntimeException wrapping any {@code FactoryException} raised
 *         while decoding the CRS
 */
private static AttributeType readAttributeType(DataInput in, FeatureTypeFactory typeFactory)
        throws IOException {
    final Name name = readName(in);
    final byte typeTag = in.readByte();
    final FieldType type = FieldType.valueOf(typeTag);
    if (Geometry.class.isAssignableFrom(type.getBinding())) {
        final boolean isCRSCode = in.readBoolean(); // as opposed to a raw WKT string
        final String crsText = in.readUTF();
        final CoordinateReferenceSystem crs;
        try {
            if (isCRSCode) {
                // "EPSG::0" is the writer's sentinel for "no CRS"
                if ("urn:ogc:def:crs:EPSG::0".equals(crsText)) {
                    crs = null;
                } else {
                    // plain "EPSG:" prefixed codes were written for
                    // longitude-first (east/north) axis order
                    boolean forceLongitudeFirst = crsText.startsWith("EPSG:");
                    crs = CRS.decode(crsText, forceLongitudeFirst);
                }
            } else {
                crs = CRS.parseWKT(crsText);
            }
        } catch (FactoryException e) {
            throw new RuntimeException(e);
        }
        return typeFactory.createGeometryType(name, type.getBinding(), crs, false, false,
                Collections.<Filter> emptyList(), null, null);
    } else {
        return typeFactory.createAttributeType(name, type.getBinding(), false, false,
                Collections.<Filter> emptyList(), null, null);
    }
}
/**
 * Reads an attribute descriptor: name, nillability, occurrence bounds, and
 * the attribute type itself. Geometry types get a geometry descriptor, all
 * others a plain attribute descriptor.
 */
private static AttributeDescriptor readAttributeDescriptor(DataInput in,
        FeatureTypeFactory typeFactory) throws IOException {
    final Name descriptorName = readName(in);
    final boolean isNillable = in.readBoolean();
    final int min = in.readInt();
    final int max = in.readInt();
    final AttributeType attributeType = readAttributeType(in, typeFactory);
    if (attributeType instanceof GeometryType) {
        return typeFactory.createGeometryDescriptor((GeometryType) attributeType,
                descriptorName, min, max, isNillable, null);
    }
    return typeFactory.createAttributeDescriptor(attributeType, descriptorName, min, max,
            isNillable, null);
}
/**
 * Writes a qualified name as two UTF strings: namespace (empty string when
 * null), then the local part. Counterpart of {@code readName}.
 */
private static void writeName(Name name, DataOutput data) throws IOException {
    final String namespace = name.getNamespaceURI();
    data.writeUTF(namespace == null ? "" : namespace);
    data.writeUTF(name.getLocalPart());
}
/**
 * Writes a property type as its name and field-type tag; geometry types
 * additionally carry their CRS. The CRS is written as an authority code when
 * one can be resolved and round-tripped ("EPSG:n" for longitude-first axis
 * order, "urn:ogc:def:crs:EPSG::n" otherwise), falling back to a WKT string.
 */
private static void writePropertyType(PropertyType type, DataOutput data) throws IOException {
    writeName(type.getName(), data);
    data.writeByte(FieldType.forBinding(type.getBinding()).getTag());
    if (type instanceof GeometryType) {
        GeometryType gType = (GeometryType) type;
        CoordinateReferenceSystem crs = gType.getCoordinateReferenceSystem();
        String srsName;
        if (crs == null) {
            // sentinel meaning "no CRS"; recognized by readAttributeType
            srsName = "urn:ogc:def:crs:EPSG::0";
        } else {
            final boolean longitudeFirst = CRS.getAxisOrder(crs, false) == AxisOrder.EAST_NORTH;
            final boolean codeOnly = true;
            String crsCode = CRS.toSRS(crs, codeOnly);
            if (crsCode != null) {
                // axis order is encoded via the code's prefix, not separately
                srsName = (longitudeFirst ? "EPSG:" : "urn:ogc:def:crs:EPSG::") + crsCode;
                // check that what we are writing is actually a valid EPSG code and we will be
                // able to decode it later. If not, we will use WKT instead
                try {
                    CRS.decode(srsName, longitudeFirst);
                } catch (NoSuchAuthorityCodeException e) {
                    srsName = null;
                } catch (FactoryException e) {
                    srsName = null;
                }
            } else {
                srsName = null;
            }
        }
        if (srsName != null) {
            // boolean flag tells the reader an authority code follows
            data.writeBoolean(true);
            data.writeUTF(srsName);
        } else {
            final String wkt;
            if (crs instanceof Formattable) {
                wkt = ((Formattable) crs).toWKT(Formattable.SINGLE_LINE);
            } else {
                wkt = crs.toWKT();
            }
            data.writeBoolean(false);
            data.writeUTF(wkt);
        }
    }
}
/**
 * Writes a property descriptor: name, nillability, occurrence bounds, then
 * its property type. Field order must match readAttributeDescriptor.
 */
private static void writeProperty(PropertyDescriptor attr, DataOutput data) throws IOException {
    writeName(attr.getName(), data);
    data.writeBoolean(attr.isNillable());
    data.writeInt(attr.getMinOccurs());
    data.writeInt(attr.getMaxOccurs());
    writePropertyType(attr.getType(), data);
}
}
| |
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.test.udt.util;
import com.google.caliper.Measurement;
import com.google.caliper.MeasurementSet;
import com.google.caliper.Run;
import com.google.caliper.Scenario;
import com.google.caliper.ScenarioResult;
import com.yammer.metrics.core.Gauge;
import com.yammer.metrics.core.Meter;
import com.yammer.metrics.core.MetricsRegistry;
import com.yammer.metrics.core.Timer;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
 * Caliper measure with Metrics provider.
 * <p>
 * measure up to 3 values: {@link #rate()}, {@link #time()}, {@link #size()}
 */
public class CaliperMeasure {
    /**
     * Gauge any double value.
     */
    public static class SizeGuage extends Gauge<Double> {
        // volatile: written by the measuring thread, read by metrics reporters
        private volatile Double size = 0.0;
        @Override
        public Double value() {
            return size;
        }
        public void value(final double size) {
            this.size = size;
        }
    }
    /**
     * Default rate measurement units: unit label to scale factor in bytes.
     */
    private static final Map<String, Integer> RATE_UNIT = new HashMap<String, Integer>();
    static {
        RATE_UNIT.put("Rate B/s", 1);
        RATE_UNIT.put("Rate KB/s", 1024);
        RATE_UNIT.put("Rate MB/s", 1024 * 1024);
        RATE_UNIT.put("Rate GB/s", 1024 * 1024 * 1024);
    }
    /**
     * Default time measurement units: unit label to scale factor in nanos.
     */
    private static final Map<String, Integer> TIME_UNIT = new HashMap<String, Integer>();
    static {
        TIME_UNIT.put("Time ns", 1);
        TIME_UNIT.put("Time us", 1000);
        TIME_UNIT.put("Time ms", 1000 * 1000);
        TIME_UNIT.put("Time s ", 1000 * 1000 * 1000);
    }
    /**
     * Default size measurement units: unit label to scale factor in bytes.
     */
    private static final Map<String, Integer> SIZE_UNIT = new HashMap<String, Integer>();
    static {
        SIZE_UNIT.put("Size B", 1);
        SIZE_UNIT.put("Size KB", 1024);
        SIZE_UNIT.put("Size MB", 1024 * 1024);
        SIZE_UNIT.put("Size GB", 1024 * 1024 * 1024);
    }
    // samples keyed by the nanoTime of the mark() call that produced them
    private final Map<Long, Measurement> rateMap = new HashMap<Long, Measurement>();
    private final Map<Long, Measurement> timeMap = new HashMap<Long, Measurement>();
    private final Map<Long, Measurement> sizeMap = new HashMap<Long, Measurement>();
    private final MetricsRegistry metrics = new MetricsRegistry();
    private final Meter rate = metrics.newMeter(getClass(), "rate", "bytes",
            TimeUnit.SECONDS);
    private final Timer time = metrics.newTimer(getClass(), "time",
            TimeUnit.NANOSECONDS, TimeUnit.SECONDS);
    private final SizeGuage size = new SizeGuage();
    {
        // register the size gauge so the registry manages it with the others
        metrics.newGauge(getClass(), "", size);
    }
    /**
     * Rate meter.
     */
    public Meter rate() {
        return rate;
    }
    /**
     * Time meter.
     */
    public Timer time() {
        return time;
    }
    /**
     * Size meter.
     */
    public SizeGuage size() {
        return size;
    }
    /**
     * Workaround: zero breaks gwt web app.
     */
    private static double filter(final double value) {
        if (value <= 0.0) {
            return 1.0;
        } else {
            return value;
        }
    }
    /**
     * Perform measurement; convert from metrics into caliper.
     * <p>
     * Fix: take a single timestamp per call so the rate/time/size samples of
     * one mark share the same key across the three maps; previously each map
     * used its own System.nanoTime() reading, giving a single mark three
     * slightly different timestamps.
     */
    @SuppressWarnings("FloatingPointEquality")
    public void mark() {
        final double rateValue = filter(rate.oneMinuteRate());
        final double timeValue = filter(time.mean());
        final double sizeValue = filter(size.value());
        if (rateValue == 1.0 && timeValue == 1.0 && sizeValue == 1.0) {
            // ignore complete blank entries (all values clamped by filter())
            return;
        }
        final long stamp = System.nanoTime();
        rateMap.put(stamp, new Measurement(RATE_UNIT, rateValue, rateValue));
        timeMap.put(stamp, new Measurement(TIME_UNIT, timeValue, timeValue));
        sizeMap.put(stamp, new Measurement(SIZE_UNIT, sizeValue, sizeValue));
    }
    private final Map<String, String> variables = new HashMap<String, String>();
    /**
     * Caliper scenario variables.
     */
    public Map<String, String> variables() {
        return variables;
    }
    // snapshot a sample map into a caliper MeasurementSet
    private static MeasurementSet measurementSet(final Map<Long, Measurement> map) {
        final Measurement[] array = map.values().toArray(new Measurement[0]);
        return new MeasurementSet(array);
    }
    /**
     * Attach this measure to parent caliper run.
     */
    public void appendTo(final Run run) {
        final Scenario scenario = new Scenario(variables());
        /** display rate as caliper durations */
        final MeasurementSet timeSet = measurementSet(rateMap);
        final String timeLog = null;
        /** display time as caliper instances */
        final MeasurementSet instSet = measurementSet(timeMap);
        final String instLog = null;
        /** display size as caliper memory */
        final MeasurementSet heapSet = measurementSet(sizeMap);
        final String heapLog = null;
        final ScenarioResult scenarioResult = new ScenarioResult(timeSet,
                timeLog, instSet, instLog, heapSet, heapLog);
        final Map<Scenario, ScenarioResult> measurements = run
                .getMeasurements();
        measurements.put(scenario, scenarioResult);
    }
    /**
     * Terminate metrics resources.
     */
    public void shutdown() {
        rate.stop();
        time.stop();
        metrics.shutdown();
    }
}
| |
package com.github.nkzawa.engineio.client.transports;
import com.github.nkzawa.emitter.Emitter;
import com.github.nkzawa.thread.EventThread;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.logging.Logger;
/**
 * XHR-based polling transport: performs HTTP long-poll GETs and POST writes
 * through {@link HttpURLConnection}, reporting results via emitter events.
 */
public class PollingXHR extends Polling {
    private static final Logger logger = Logger.getLogger(PollingXHR.class.getName());
    // most recent write (POST) request
    private Request sendXhr;
    // most recent poll (GET) request
    private Request pollXhr;
    public PollingXHR(Options opts) {
        super(opts);
    }
    protected Request request() {
        return this.request(null);
    }
    /**
     * Builds a Request targeting this transport's URI, relaying its header
     * events to this emitter.
     *
     * @param opts request options; a fresh default is used when null
     */
    protected Request request(Request.Options opts) {
        if (opts == null) {
            opts = new Request.Options();
        }
        opts.uri = this.uri();
        opts.sslContext = this.sslContext;
        Request req = new Request(opts);
        final PollingXHR self = this;
        req.on(Request.EVENT_REQUEST_HEADERS, new Listener() {
            @Override
            public void call(Object... args) {
                // Never execute asynchronously for support to modify headers.
                self.emit(EVENT_REQUEST_HEADERS, args[0]);
            }
        }).on(Request.EVENT_RESPONSE_HEADERS, new Listener() {
            @Override
            public void call(final Object... args) {
                // response headers are relayed on the event thread
                EventThread.exec(new Runnable() {
                    @Override
                    public void run() {
                        self.emit(EVENT_RESPONSE_HEADERS, args[0]);
                    }
                });
            }
        });
        return req;
    }
    @Override
    protected void doWrite(byte[] data, final Runnable fn) {
        Request.Options opts = new Request.Options();
        opts.method = "POST";
        opts.data = data;
        Request req = this.request(opts);
        final PollingXHR self = this;
        req.on(Request.EVENT_SUCCESS, new Listener() {
            @Override
            public void call(Object... args) {
                EventThread.exec(new Runnable() {
                    @Override
                    public void run() {
                        // notify the caller that the write completed
                        fn.run();
                    }
                });
            }
        });
        req.on(Request.EVENT_ERROR, new Listener() {
            @Override
            public void call(final Object... args) {
                EventThread.exec(new Runnable() {
                    @Override
                    public void run() {
                        Exception err = args.length > 0 && args[0] instanceof Exception ? (Exception)args[0] : null;
                        self.onError("xhr post error", err);
                    }
                });
            }
        });
        req.create();
        this.sendXhr = req;
    }
    @Override
    protected void doPoll() {
        logger.fine("xhr poll");
        Request req = this.request();
        final PollingXHR self = this;
        req.on(Request.EVENT_DATA, new Listener() {
            @Override
            public void call(final Object... args) {
                EventThread.exec(new Runnable() {
                    @Override
                    public void run() {
                        // payload may be text or binary depending on content type
                        Object arg = args.length > 0 ? args[0] : null;
                        if (arg instanceof String) {
                            self.onData((String)arg);
                        } else if (arg instanceof byte[]) {
                            self.onData((byte[])arg);
                        }
                    }
                });
            }
        });
        req.on(Request.EVENT_ERROR, new Listener() {
            @Override
            public void call(final Object... args) {
                EventThread.exec(new Runnable() {
                    @Override
                    public void run() {
                        Exception err = args.length > 0 && args[0] instanceof Exception ? (Exception) args[0] : null;
                        self.onError("xhr poll error", err);
                    }
                });
            }
        });
        req.create();
        this.pollXhr = req;
    }
    /**
     * A single HTTP request/response exchange, reported through emitter
     * events (success, data, error, request/response headers).
     */
    public static class Request extends Emitter {
        public static final String EVENT_SUCCESS = "success";
        public static final String EVENT_DATA = "data";
        public static final String EVENT_ERROR = "error";
        public static final String EVENT_REQUEST_HEADERS = "requestHeaders";
        public static final String EVENT_RESPONSE_HEADERS = "responseHeaders";
        private String method;
        private String uri;
        // data is always a binary
        private byte[] data;
        private SSLContext sslContext;
        private HttpURLConnection xhr;
        public Request(Options opts) {
            // defaults to GET when no method was supplied
            this.method = opts.method != null ? opts.method : "GET";
            this.uri = opts.uri;
            this.data = opts.data;
            this.sslContext = opts.sslContext;
        }
        /**
         * Opens the connection, emits the request headers so callers may
         * modify them, then performs the exchange on a new thread.
         * NOTE(review): the background thread and abort() both touch the xhr
         * field without synchronization — presumably tolerated; confirm
         * before relying on abort() during an in-flight exchange.
         */
        public void create() {
            final Request self = this;
            try {
                logger.fine(String.format("xhr open %s: %s", this.method, this.uri));
                URL url = new URL(this.uri);
                xhr = (HttpURLConnection)url.openConnection();
                xhr.setRequestMethod(this.method);
            } catch (IOException e) {
                this.onError(e);
                return;
            }
            if (xhr instanceof HttpsURLConnection && this.sslContext != null) {
                ((HttpsURLConnection)xhr).setSSLSocketFactory(this.sslContext.getSocketFactory());
            }
            // case-insensitive so listeners may set headers with any casing
            Map<String, String> headers = new TreeMap<String, String>(String.CASE_INSENSITIVE_ORDER);
            if ("POST".equals(this.method)) {
                xhr.setDoOutput(true);
                headers.put("Content-type", "application/octet-stream");
            }
            self.onRequestHeaders(headers);
            for (Map.Entry<String, String> header : headers.entrySet()) {
                xhr.setRequestProperty(header.getKey(), header.getValue());
            }
            logger.fine(String.format("sending xhr with url %s | data %s", this.uri, this.data));
            new Thread(new Runnable() {
                @Override
                public void run() {
                    OutputStream output = null;
                    try {
                        if (self.data != null) {
                            xhr.setFixedLengthStreamingMode(self.data.length);
                            output = new BufferedOutputStream(xhr.getOutputStream());
                            output.write(self.data);
                            output.flush();
                        }
                        Map<String, String> headers = new TreeMap<String, String>(String.CASE_INSENSITIVE_ORDER);
                        Map<String, List<String>> xhrHeaderFields = xhr.getHeaderFields();
                        if(xhrHeaderFields != null) {
                            for (String key : xhrHeaderFields.keySet()) {
                                // the status line is mapped under a null key; skip it
                                if (key == null) continue;
                                headers.put(key, xhr.getHeaderField(key));
                            }
                        }
                        self.onResponseHeaders(headers);
                        final int statusCode = xhr.getResponseCode();
                        if (HttpURLConnection.HTTP_OK == statusCode) {
                            self.onLoad();
                        } else {
                            // non-200 responses are surfaced as an IOException
                            // carrying the status code as its message
                            self.onError(new IOException(Integer.toString(statusCode)));
                        }
                    } catch (IOException e) {
                        self.onError(e);
                    } finally {
                        try {
                            if (output != null) output.close();
                        } catch (IOException e) {}
                    }
                }
            }).start();
        }
        private void onSuccess() {
            this.emit(EVENT_SUCCESS);
            this.cleanup();
        }
        private void onData(String data) {
            this.emit(EVENT_DATA, data);
            this.onSuccess();
        }
        private void onData(byte[] data) {
            this.emit(EVENT_DATA, data);
            this.onSuccess();
        }
        private void onError(Exception err) {
            this.emit(EVENT_ERROR, err);
            this.cleanup();
        }
        private void onRequestHeaders(Map<String, String> headers) {
            this.emit(EVENT_REQUEST_HEADERS, headers);
        }
        private void onResponseHeaders(Map<String, String> headers) {
            this.emit(EVENT_RESPONSE_HEADERS, headers);
        }
        // disconnect and drop the connection; idempotent
        private void cleanup() {
            if (xhr == null) {
                return;
            }
            xhr.disconnect();
            xhr = null;
        }
        /**
         * Reads the response body: application/octet-stream payloads are
         * accumulated chunk-by-chunk into one byte array, anything else is
         * read as text with line terminators discarded.
         */
        private void onLoad() {
            InputStream input = null;
            BufferedReader reader = null;
            String contentType = xhr.getContentType();
            try {
                if ("application/octet-stream".equalsIgnoreCase(contentType)) {
                    input = new BufferedInputStream(this.xhr.getInputStream());
                    List<byte[]> buffers = new ArrayList<byte[]>();
                    int capacity = 0;
                    int len = 0;
                    byte[] buffer = new byte[1024];
                    while ((len = input.read(buffer)) > 0) {
                        byte[] _buffer = new byte[len];
                        System.arraycopy(buffer, 0, _buffer, 0, len);
                        buffers.add(_buffer);
                        capacity += len;
                    }
                    // concatenate the chunks into one contiguous array
                    ByteBuffer data = ByteBuffer.allocate(capacity);
                    for (byte[] b : buffers) {
                        data.put(b);
                    }
                    this.onData(data.array());
                } else {
                    String line;
                    StringBuilder data = new StringBuilder();
                    reader = new BufferedReader(new InputStreamReader(xhr.getInputStream()));
                    while ((line = reader.readLine()) != null) {
                        data.append(line);
                    }
                    this.onData(data.toString());
                }
            } catch (IOException e) {
                this.onError(e);
            } finally {
                try {
                    if (input != null) input.close();
                } catch (IOException e) {}
                try {
                    if (reader != null) reader.close();
                } catch (IOException e) {}
            }
        }
        public void abort() {
            this.cleanup();
        }
        public static class Options {
            public String uri;
            public String method;
            public byte[] data;
            public SSLContext sslContext;
        }
    }
}
| |
/*
// Licensed to Julian Hyde under one or more contributor license
// agreements. See the NOTICE file distributed with this work for
// additional information regarding copyright ownership.
//
// Julian Hyde licenses this file to you under the Apache License,
// Version 2.0 (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
*/
package org.eigenbase.rel.metadata;
import java.util.*;
import org.eigenbase.rel.*;
import org.eigenbase.rel.rules.*;
import org.eigenbase.reltype.*;
import org.eigenbase.rex.*;
import org.eigenbase.sql.fun.*;
import net.hydromatic.optiq.BuiltinMethod;
import net.hydromatic.optiq.util.BitSets;
/**
 * RelMdColumnUniqueness supplies a default implementation of {@link
 * RelMetadataQuery#areColumnsUnique} for the standard logical algebra.
 *
 * <p>Each overload answers whether the given bit set of output columns forms
 * a unique key for the given relational expression; {@code null} means
 * "unknown".
 */
public class RelMdColumnUniqueness {
  public static final RelMetadataProvider SOURCE =
      ReflectiveRelMetadataProvider.reflectiveSource(
          BuiltinMethod.COLUMN_UNIQUENESS.method, new RelMdColumnUniqueness());

  //~ Constructors -----------------------------------------------------------

  private RelMdColumnUniqueness() {}

  //~ Methods ----------------------------------------------------------------

  /** A filter's columns are unique iff they are unique in its input. */
  public Boolean areColumnsUnique(
      FilterRelBase rel,
      BitSet columns,
      boolean ignoreNulls) {
    return RelMetadataQuery.areColumnsUnique(
        rel.getChild(),
        columns,
        ignoreNulls);
  }

  /** A sort's columns are unique iff they are unique in its input. */
  public Boolean areColumnsUnique(
      SortRel rel,
      BitSet columns,
      boolean ignoreNulls) {
    return RelMetadataQuery.areColumnsUnique(
        rel.getChild(),
        columns,
        ignoreNulls);
  }

  /** A correlator delegates uniqueness to its left input. */
  public Boolean areColumnsUnique(
      CorrelatorRel rel,
      BitSet columns,
      boolean ignoreNulls) {
    return RelMetadataQuery.areColumnsUnique(
        rel.getLeft(),
        columns,
        ignoreNulls);
  }

  /**
   * Uniqueness through a projection: only identity-mapped columns (and, when
   * ignoring nulls, nullability-only casts of input refs) can carry
   * uniqueness information down to the child.
   */
  public Boolean areColumnsUnique(
      ProjectRelBase rel,
      BitSet columns,
      boolean ignoreNulls) {
    // ProjectRel maps a set of rows to a different set;
    // Without knowledge of the mapping function(whether it
    // preserves uniqueness), it is only safe to derive uniqueness
    // info from the child of a project when the mapping is f(a) => a.
    //
    // Also need to map the input column set to the corresponding child
    // references
    List<RexNode> projExprs = rel.getProjects();
    BitSet childColumns = new BitSet();
    for (int bit : BitSets.toIter(columns)) {
      RexNode projExpr = projExprs.get(bit);
      if (projExpr instanceof RexInputRef) {
        childColumns.set(((RexInputRef) projExpr).getIndex());
      } else if (projExpr instanceof RexCall && ignoreNulls) {
        // If the expression is a cast such that the types are the same
        // except for the nullability, then if we're ignoring nulls,
        // it doesn't matter whether the underlying column reference
        // is nullable. Check that the types are the same by making a
        // nullable copy of both types and then comparing them.
        RexCall call = (RexCall) projExpr;
        if (call.getOperator() != SqlStdOperatorTable.CAST) {
          continue;
        }
        RexNode castOperand = call.getOperands().get(0);
        if (!(castOperand instanceof RexInputRef)) {
          continue;
        }
        RelDataTypeFactory typeFactory =
            rel.getCluster().getTypeFactory();
        RelDataType castType =
            typeFactory.createTypeWithNullability(
                projExpr.getType(), true);
        RelDataType origType = typeFactory.createTypeWithNullability(
            castOperand.getType(),
            true);
        if (castType.equals(origType)) {
          childColumns.set(((RexInputRef) castOperand).getIndex());
        }
      } else {
        // If the expression will not influence uniqueness of the
        // projection, then skip it.
        continue;
      }
    }
    // If no columns can affect uniqueness, then return unknown
    if (childColumns.cardinality() == 0) {
      return null;
    }
    return RelMetadataQuery.areColumnsUnique(
        rel.getChild(),
        childColumns,
        ignoreNulls);
  }

  /**
   * Uniqueness through a join: splits the column mask by join side, then
   * combines per-side uniqueness with equi-join key uniqueness on the
   * opposite side (unless that side is null-generating).
   */
  public Boolean areColumnsUnique(
      JoinRelBase rel,
      BitSet columns, boolean
      ignoreNulls) {
    // an empty key is never unique over a join's output
    if (columns.cardinality() == 0) {
      return false;
    }
    RelNode left = rel.getLeft();
    RelNode right = rel.getRight();
    // Divide up the input column mask into column masks for the left and
    // right sides of the join
    BitSet leftColumns = new BitSet();
    BitSet rightColumns = new BitSet();
    int nLeftColumns = left.getRowType().getFieldCount();
    for (int bit : BitSets.toIter(columns)) {
      if (bit < nLeftColumns) {
        leftColumns.set(bit);
      } else {
        // right-side output columns are offset by the left field count
        rightColumns.set(bit - nLeftColumns);
      }
    }
    // If the original column mask contains columns from both the left and
    // right hand side, then the columns are unique if and only if they're
    // unique for their respective join inputs
    Boolean leftUnique =
        RelMetadataQuery.areColumnsUnique(left, leftColumns, ignoreNulls);
    Boolean rightUnique =
        RelMetadataQuery.areColumnsUnique(right, rightColumns, ignoreNulls);
    if ((leftColumns.cardinality() > 0)
        && (rightColumns.cardinality() > 0)) {
      if ((leftUnique == null) || (rightUnique == null)) {
        return null;
      } else {
        return leftUnique && rightUnique;
      }
    }
    // If we're only trying to determine uniqueness for columns that
    // originate from one join input, then determine if the equijoin
    // columns from the other join input are unique. If they are, then
    // the columns are unique for the entire join if they're unique for
    // the corresponding join input, provided that input is not null
    // generating.
    BitSet leftJoinCols = new BitSet();
    BitSet rightJoinCols = new BitSet();
    RelMdUtil.findEquiJoinCols(
        left,
        right,
        rel.getCondition(),
        leftJoinCols,
        rightJoinCols);
    if (leftColumns.cardinality() > 0) {
      if (rel.getJoinType().generatesNullsOnLeft()) {
        return false;
      }
      Boolean rightJoinColsUnique =
          RelMetadataQuery.areColumnsUnique(
              right,
              rightJoinCols,
              ignoreNulls);
      if ((rightJoinColsUnique == null) || (leftUnique == null)) {
        return null;
      }
      return rightJoinColsUnique && leftUnique;
    } else if (rightColumns.cardinality() > 0) {
      if (rel.getJoinType().generatesNullsOnRight()) {
        return false;
      }
      Boolean leftJoinColsUnique =
          RelMetadataQuery.areColumnsUnique(
              left,
              leftJoinCols,
              ignoreNulls);
      if ((leftJoinColsUnique == null) || (rightUnique == null)) {
        return null;
      }
      return leftJoinColsUnique && rightUnique;
    }
    // unreachable: the initial cardinality check guarantees at least one
    // side has columns set
    throw new AssertionError();
  }

  public Boolean areColumnsUnique(
      SemiJoinRel rel,
      BitSet columns,
      boolean ignoreNulls) {
    // only return the unique keys from the LHS since a semijoin only
    // returns the LHS
    return RelMetadataQuery.areColumnsUnique(
        rel.getLeft(),
        columns,
        ignoreNulls);
  }

  public Boolean areColumnsUnique(
      AggregateRelBase rel,
      BitSet columns,
      boolean ignoreNulls) {
    // group by keys form a unique key
    if (rel.getGroupCount() > 0) {
      BitSet groupKey = new BitSet();
      for (int i = 0; i < rel.getGroupCount(); i++) {
        groupKey.set(i);
      }
      // unique iff the queried columns cover the whole group key
      return BitSets.contains(columns, groupKey);
    } else {
      // interpret an empty set as asking whether the aggregation is full
      // table (in which case it returns at most one row);
      // TODO jvs 1-Sept-2008: apply this convention consistently
      // to other relational expressions, as well as to
      // RelMetadataQuery.getUniqueKeys
      return columns.isEmpty();
    }
  }

  // Catch-all rule when none of the others apply.
  public Boolean areColumnsUnique(
      RelNode rel,
      BitSet columns,
      boolean ignoreNulls) {
    // no information available
    return null;
  }
}
// End RelMdColumnUniqueness.java
| |
/*
* Copyright (c) 2012 Felix Mo. All rights reserved.
*
* CitySim is published under the terms of the MIT License. See the LICENSE file for more information.
*
*/
import greenfoot.*; // (World, Actor, GreenfootImage, Greenfoot and MouseInfo)
import java.awt.Point;
import java.util.HashMap;
/**
 * Tile
 * CitySim
 * v0.1
 *
 * Created by Felix Mo on 02-11-2012
 *
 * Data structure for map tiles
 *
 */
public class Tile
{
    // ---------------------------------------------------------------------------------------------------------------------
    /*
     * INSTANCE VARIABLES *
     */
    // All tile attributes, keyed by the Data.TILES_* constants. Raw type kept
    // so the HashMap-taking constructor stays compatible with existing callers.
    private HashMap properties;
    /*
     * CONSTANTS *
     */
    public static final int SIZE = 32; // size of square tile; px
    /*
     * TILE TYPES *
     */
    public static final int EMPTY = 0;
    public static final int GROUND = 1;
    public static final int WATER = 3;
    // GRASS
    public static final int GRASS_1 = 38;
    // STREETS
    // H = horizontal
    // V = vertical
    // U = up
    // D = down
    // L = left
    // R = right
    // T = top
    // D = down
    // L = left
    // R = right
    public static final int BRIDGE_H = 65;
    public static final int BRIDGE_V = 66;
    public static final int STREET_H = 67;
    public static final int STREET_V = 68;
    public static final int STREET_B_TR = 69;
    public static final int STREET_B_BR = 70;
    public static final int STREET_B_BL = 71;
    public static final int STREET_B_TL = 72;
    public static final int STREET_H_U = 73;
    public static final int STREET_V_R = 74;
    public static final int STREET_H_D = 75;
    public static final int STREET_V_L = 76;
    public static final int STREET_INTERSECTION = 77;
    // POWER GRID
    public static final int POWERLINE_ROAD_H = 78;
    public static final int POWERLINE_ROAD_V = 79;
    public static final int POWERLINE_H = 211;
    public static final int POWERLINE_V = 212;
    public static final int POWERLINE_B_TR = 213;
    public static final int POWERLINE_B_BR = 214;
    public static final int POWERLINE_B_BL = 215;
    public static final int POWERLINE_B_TL = 216;
    public static final int POWERLINE_H_U = 217;
    public static final int POWERLINE_V_R = 218;
    public static final int POWERLINE_H_D = 219;
    public static final int POWERLINE_V_L = 220;
    public static final int POWERLINE_INTERSECTION = 221;
    // RECREATION
    public static final int PARK = 841;
    /*
     * IMAGE FILES *
     */
    // ---------------------------------------------------------------------------------------------------------------------
    /**
     * Creates a tile at the given position with the given type; all other
     * attributes start at their "unset" defaults (zone 0, zone id -1, no
     * road, unpowered, no power grid, no recreation).
     *
     * Fix: uses Integer.valueOf (cached boxing) instead of the deprecated
     * new Integer(...) constructor, which allocated a fresh object per call.
     */
    public Tile(int dbID, Point position, int type) {
        this.properties = new HashMap();
        this.properties.put(Data.TILES_ID, Integer.valueOf(dbID));
        this.properties.put(Data.TILES_X, Integer.valueOf(position.x));
        this.properties.put(Data.TILES_Y, Integer.valueOf(position.y));
        this.properties.put(Data.TILES_TYPE, Integer.valueOf(type));
        this.properties.put(Data.TILES_ZONE, Integer.valueOf(0));
        this.properties.put(Data.TILES_ZONEID, Integer.valueOf(-1));
        this.properties.put(Data.TILES_ROAD, Integer.valueOf(0));
        this.properties.put(Data.TILES_POWERED, Integer.valueOf(0));
        this.properties.put(Data.TILES_POWERGRID_TYPE, Integer.valueOf(0));
        this.properties.put(Data.TILES_RECREATION_TYPE, Integer.valueOf(0));
    }
    /**
     * Wraps an existing attribute map (e.g. a row loaded from the database).
     * The map is kept by reference, not copied.
     */
    public Tile(HashMap properties) {
        this.properties = properties;
    }
    /*
     * ACCESSORS *
     */
    // Raw attribute lookup by Data.TILES_* key.
    public Object get(String key) {
        return this.properties.get(key);
    }
    public int dbID() {
        return ((Integer)properties.get(Data.TILES_ID)).intValue();
    }
    public Point position() {
        return new Point((Integer)properties.get(Data.TILES_X), (Integer)properties.get(Data.TILES_Y));
    }
    public int type() {
        return ((Integer)properties.get(Data.TILES_TYPE)).intValue();
    }
    public void setType(int value) {
        properties.put(Data.TILES_TYPE, Integer.valueOf(value));
    }
    public int zone() {
        return ((Integer)properties.get(Data.TILES_ZONE)).intValue();
    }
    public void setZone(int value) {
        properties.put(Data.TILES_ZONE, Integer.valueOf(value));
    }
    public int zoneID() {
        return ((Integer)properties.get(Data.TILES_ZONEID)).intValue();
    }
    public void setZoneID(int id) {
        properties.put(Data.TILES_ZONEID, Integer.valueOf(id));
    }
    public int road() {
        return ((Integer)properties.get(Data.TILES_ROAD)).intValue();
    }
    public void setRoad(int value) {
        properties.put(Data.TILES_ROAD, Integer.valueOf(value));
    }
    public int powered() {
        return ((Integer)properties.get(Data.TILES_POWERED)).intValue();
    }
    public void setPowered(int value) {
        properties.put(Data.TILES_POWERED, Integer.valueOf(value));
    }
    public int powerGrid() {
        return ((Integer)properties.get(Data.TILES_POWERGRID_TYPE)).intValue();
    }
    public void setPowerGrid(int value) {
        properties.put(Data.TILES_POWERGRID_TYPE, Integer.valueOf(value));
    }
    public int recreation() {
        return ((Integer)properties.get(Data.TILES_RECREATION_TYPE)).intValue();
    }
    public void setRecreation(int value) {
        properties.put(Data.TILES_RECREATION_TYPE, Integer.valueOf(value));
    }
    // Image for this tile's current type, served from the shared cache.
    public GreenfootImage image() {
        return ImageCache.get(((Integer)this.properties.get(Data.TILES_TYPE)).intValue());
    }
    /*
     * HELPERS *
     */
    // Loads a tile image directly from disk, bypassing the cache.
    public static GreenfootImage imageFromDiskForType(Integer type) {
        return new GreenfootImage("images/tiles/" + type + ".png");
    }
    public static GreenfootImage imageFromCacheForType(Integer value) {
        return ImageCache.get(value);
    }
}
| |
/*
* Copyright (c) OSGi Alliance (2004, 2008). All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package info.dmtree.security;
import java.security.Permission;
import java.security.PermissionCollection;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Iterator;
/**
* Indicates the callers authority to create DMT sessions on behalf of a remote
* management server. Only protocol adapters communicating with management
* servers should be granted this permission.
* <p>
* <code>DmtPrincipalPermission</code> has a target string which controls the
* name of the principal on whose behalf the protocol adapter can act. A
* wildcard is allowed at the end of the target string, to allow using any
* principal name with the given prefix. The "*" target means the
* adapter can create a session in the name of any principal.
*
* @version $Revision: 5673 $
*/
public class DmtPrincipalPermission extends Permission {
    private static final long serialVersionUID = 6388752177325038332L;

    // specifies whether the target string had a wildcard at the end
    private final boolean isPrefix;

    // the target string without the wildcard (if there was one)
    private final String principal;

    /**
     * Creates a new <code>DmtPrincipalPermission</code> object with its name
     * set to the target string. Name must be non-null and non-empty.
     *
     * @param target the name of the principal, can end with <code>*</code> to
     *        match any principal with the given prefix
     * @throws NullPointerException if <code>name</code> is <code>null</code>
     * @throws IllegalArgumentException if <code>name</code> is empty
     */
    public DmtPrincipalPermission(String target) {
        super(target);

        if (target == null)
            throw new NullPointerException(
                    "'target' parameter must not be null.");

        if (target.equals(""))
            throw new IllegalArgumentException(
                    "'target' parameter must not be empty.");

        isPrefix = target.endsWith("*");
        if (isPrefix)
            principal = target.substring(0, target.length() - 1);
        else
            principal = target;
    }

    /**
     * Creates a new <code>DmtPrincipalPermission</code> object using the
     * 'canonical' two argument constructor. In this version this class does not
     * define any actions, the second argument of this constructor must be "*"
     * so that this class can later be extended in a backward compatible way.
     *
     * @param target the name of the principal, can end with <code>*</code> to
     *        match any principal with the given prefix
     * @param actions no actions defined, must be "*" for forward compatibility
     * @throws NullPointerException if <code>name</code> or
     *         <code>actions</code> is <code>null</code>
     * @throws IllegalArgumentException if <code>name</code> is empty or
     *         <code>actions</code> is not "*"
     */
    public DmtPrincipalPermission(String target, String actions) {
        this(target);

        if (actions == null)
            throw new NullPointerException(
                    "'actions' parameter must not be null.");

        if (!actions.equals("*"))
            throw new IllegalArgumentException(
                    "'actions' parameter must be \"*\".");
    }

    /**
     * Checks whether the given object is equal to this DmtPrincipalPermission
     * instance. Two DmtPrincipalPermission instances are equal if they have the
     * same target string.
     *
     * @param obj the object to compare to this DmtPrincipalPermission instance
     * @return <code>true</code> if the parameter represents the same
     *         permissions as this instance
     */
    public boolean equals(Object obj) {
        if (obj == this)
            return true;

        if (!(obj instanceof DmtPrincipalPermission))
            return false;

        DmtPrincipalPermission other = (DmtPrincipalPermission) obj;

        return isPrefix == other.isPrefix && principal.equals(other.principal);
    }

    /**
     * Returns the action list (always <code>*</code> in the current version).
     *
     * @return the action string "*"
     */
    public String getActions() {
        return "*";
    }

    /**
     * Returns the hash code for this permission object. If two
     * DmtPrincipalPermission objects are equal according to the {@link #equals}
     * method, then calling this method on each of the two
     * DmtPrincipalPermission objects must produce the same integer result.
     *
     * @return hash code for this permission object
     */
    public int hashCode() {
        // Boolean.valueOf yields the same hash values (1231/1237) as the
        // deprecated Boolean(boolean) constructor did, without allocating.
        return Boolean.valueOf(isPrefix).hashCode() ^ principal.hashCode();
    }

    /**
     * Checks if this DmtPrincipalPermission object implies the specified
     * permission. Another DmtPrincipalPermission instance is implied by this
     * permission either if the target strings are identical, or if this target
     * can be made identical to the other target by replacing a trailing
     * "*" with any string.
     *
     * @param p the permission to check for implication
     * @return true if this DmtPrincipalPermission instance implies the
     *         specified permission
     */
    public boolean implies(Permission p) {
        if (!(p instanceof DmtPrincipalPermission))
            return false;

        DmtPrincipalPermission other = (DmtPrincipalPermission) p;

        return impliesPrincipal(other);
    }

    /**
     * Returns a new PermissionCollection object for storing
     * DmtPrincipalPermission objects.
     *
     * @return the new PermissionCollection
     */
    public PermissionCollection newPermissionCollection() {
        return new DmtPrincipalPermissionCollection();
    }

    /*
     * Returns true if the principal parameter of the given
     * DmtPrincipalPermission is implied by the principal of this permission,
     * i.e. this principal is a prefix of the other principal but ends with a *,
     * or the two principal strings are equal.
     */
    boolean impliesPrincipal(DmtPrincipalPermission p) {
        return isPrefix ? p.principal.startsWith(principal) : !p.isPrefix
                && p.principal.equals(principal);
    }
}

/**
 * Represents a homogeneous collection of DmtPrincipalPermission objects.
 */
final class DmtPrincipalPermissionCollection extends PermissionCollection {
    private static final long serialVersionUID = -6692103535775802684L;

    private ArrayList perms;

    /**
     * Create an empty DmtPrincipalPermissionCollection object.
     */
    public DmtPrincipalPermissionCollection() {
        perms = new ArrayList();
    }

    /**
     * Adds a permission to the DmtPrincipalPermissionCollection.
     *
     * @param permission the Permission object to add
     * @exception IllegalArgumentException if the permission is not a
     *            DmtPrincipalPermission
     * @exception SecurityException if this DmtPrincipalPermissionCollection
     *            object has been marked readonly
     */
    public void add(Permission permission) {
        if (!(permission instanceof DmtPrincipalPermission))
            throw new IllegalArgumentException(
                    "Cannot add permission, invalid permission type: "
                            + permission);
        if (isReadOnly())
            throw new SecurityException(
                    "Cannot add permission, collection is marked read-only.");

        // only add new permission if it is not already implied by the
        // permissions in the collection
        if (!implies(permission)) {
            // remove all permissions that are implied by the new one
            Iterator i = perms.iterator();
            while (i.hasNext())
                if (permission.implies((DmtPrincipalPermission) i.next()))
                    i.remove();

            // no need to synchronize because all adds are done sequentially
            // before any implies() calls
            perms.add(permission);
        }
    }

    /**
     * Check whether this set of permissions implies the permission specified in
     * the parameter.
     *
     * @param permission the Permission object to compare
     * @return true if the parameter permission is a proper subset of the
     *         permissions in the collection, false otherwise
     */
    public boolean implies(Permission permission) {
        if (!(permission instanceof DmtPrincipalPermission))
            return false;

        DmtPrincipalPermission other = (DmtPrincipalPermission) permission;

        Iterator i = perms.iterator();
        while (i.hasNext())
            if (((DmtPrincipalPermission) i.next()).impliesPrincipal(other))
                return true;

        return false;
    }

    /**
     * Returns an enumeration of all the DmtPrincipalPermission objects in the
     * container. The returned value cannot be <code>null</code>.
     *
     * @return an enumeration of all the DmtPrincipalPermission objects
     */
    public Enumeration elements() {
        // Convert Iterator into Enumeration
        return Collections.enumeration(perms);
    }
}
| |
/*******************************************************************************
* PathVisio, a tool for data visualization and analysis using biological pathways
* Copyright 2006-2019 BiGCaT Bioinformatics
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package org.pathvisio.visualization.plugins;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.Paint;
import java.awt.Rectangle;
import java.awt.Shape;
import java.awt.TexturePaint;
import java.awt.Toolkit;
import java.awt.image.BufferedImage;
import java.awt.image.FilteredImageSource;
import java.awt.image.ImageProducer;
import java.awt.image.RGBImageFilter;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.imageio.ImageIO;
import javax.swing.JPanel;
import org.bridgedb.Xref;
import org.jdom2.Element;
import org.pathvisio.core.Engine;
import org.pathvisio.core.debug.Logger;
import org.pathvisio.core.util.ColorConverter;
import org.pathvisio.core.util.Resources;
import org.pathvisio.core.view.GeneProduct;
import org.pathvisio.core.view.Graphics;
import org.pathvisio.data.DataException;
import org.pathvisio.data.IRow;
import org.pathvisio.data.ISample;
import org.pathvisio.desktop.gex.CachedData;
import org.pathvisio.desktop.gex.CachedData.Callback;
import org.pathvisio.desktop.gex.GexManager;
import org.pathvisio.desktop.gex.ReporterData;
import org.pathvisio.desktop.visualization.AbstractVisualizationMethod;
import org.pathvisio.desktop.visualization.ColorSet;
import org.pathvisio.desktop.visualization.ColorSetManager;
import org.pathvisio.desktop.visualization.VisualizationManager.VisualizationException;
/**
* Visualization method for coloring by expression: can color a datanode by one or more
* expression values. In basic mode several samples use the same colorSet,
* in advanced mode each sample can have a different colorSet
*/
public class ColorByExpression extends AbstractVisualizationMethod {
	static final Color DEFAULT_TRANSPARENT = Engine.TRANSPARENT_COLOR;
	static final Color LINE_COLOR_DEFAULT = Color.BLACK;

	/** Diagonal gray stripe pattern, painted while expression data is still being fetched. */
	static private final Paint STRIPE_PATTERN;
	static
	{
		BufferedImage buf = new BufferedImage(8, 8, BufferedImage.TYPE_INT_RGB);
		java.awt.Graphics g = buf.getGraphics();
		g.setColor(Color.GRAY);
		g.fillRect(0, 0, 8, 8);
		g.setColor(Color.LIGHT_GRAY);
		g.fillPolygon(
				new int[] {4, 0, 0, 8}, new int[] {0, 4, 8, 0}, 4);
		g.fillPolygon(
				new int[] {8, 4, 8}, new int[] {4, 8, 8}, 3);
		STRIPE_PATTERN = new TexturePaint(buf, new Rectangle(0, 0, 8, 8));
	}

	// Samples selected for visualization, in drawing order (left to right).
	private List<ConfiguredSample> useSamples = new ArrayList<ConfiguredSample>();
	// Lazily initialized; includes the default bundled images plus user-added URLs.
	List<URL> imageURLs;

	private final GexManager gexManager;
	private final ColorSetManager csm;

	GexManager getGexManager() { return gexManager; }

	/** The image URLs that ship with the plugin; these are never persisted to XML. */
	private List<URL> defaultURLs() {
		return new ArrayList<URL>(Arrays.asList(new URL[] {
				Resources.getResourceURL("protein_hi.bmp"),
				Resources.getResourceURL("mRNA_hi.bmp") }));
	}

	public ColorByExpression(GexManager gexManager, ColorSetManager csm) {
		this.gexManager = gexManager;
		this.csm = csm;
		setIsConfigurable(true);
		setUseProvidedArea(true);
	}

	public Component visualizeOnToolTip(Graphics g) {
		// TODO Auto-generated method stub
		return null;
	}

	/**
	 * Check whether advanced settings are used
	 */
	public boolean isAdvanced() {
		//Advanced when different colorsets or an image is specified
		if(useSamples.size() == 0) {
			return false;
		}
		for(ConfiguredSample cs : useSamples) {
			if(cs.getURL() != null) {
				return true;
			}
		}
		// getSingleColorSet() returns null when the samples use different colorsets
		return getSingleColorSet() == null;
	}

	/**
	 * Set a single colorset for all samples.
	 */
	public void setSingleColorSet(ColorSet cs) {
		for(ConfiguredSample s : useSamples) {
			s.setColorSet(cs);
		}
	}

	/**
	 * Get the single colorset that is used for all
	 * samples. Returns null when different colorsets are
	 * used.
	 */
	public ColorSet getSingleColorSet() {
		ColorSet cs = null;
		for(ConfiguredSample s : useSamples) {
			if(cs == null) {
				cs = s.getColorSet();
			} else {
				if(cs != s.getColorSet()) {
					return null;
				}
			}
		}
		return cs;
	}

	/**
	 * Get the configured sample for the given sample. Returns
	 * null when no configured sample is found.
	 */
	public ConfiguredSample getConfiguredSample(ISample s) {
		for(ConfiguredSample cs : useSamples) {
			// identity comparison: a ConfiguredSample wraps the exact ISample instance
			if(cs.getSample() != null && cs.getSample() == s) {
				return cs;
			}
		}
		return null;
	}

	public String getDescription() {
		return "Color DataNodes by their expression value";
	}

	public String getName() {
		return "Expression as color";
	}

	public JPanel getConfigurationPanel() {
		return new ColorByExpressionPanel(this, csm);
	}

	public List<ConfiguredSample> getConfiguredSamples() {
		return useSamples;
	}

	public List<ISample> getSelectedSamples() {
		List<ISample> samples = new ArrayList<ISample>();
		for(ConfiguredSample cs : useSamples)
		{
			samples.add(cs.getSample());
		}
		return samples;
	}

	List<URL> getImageURLs() {
		if(imageURLs == null) imageURLs = defaultURLs();
		return imageURLs;
	}

	void addImageURL(URL url) {
		if(!getImageURLs().contains(url)) getImageURLs().add(url);
	}

	void removeImageURL(URL url) {
		// only user-added (local file) images may be removed; bundled defaults stay
		if(url.getProtocol().equals("file")) getImageURLs().remove(url);
	}

	/** Draws the sample's image, colored by the given expression color, centered in area. */
	void drawImage(ConfiguredSample is, Color rgb, Rectangle area, Graphics2D g2d) {
		Image img = is.getImage(rgb);
		if(img != null) {
			drawBackground(area, g2d);

			Dimension scaleTo = is.getScaleSize(new Dimension(area.width, area.height));
			Image simg = img.getScaledInstance(scaleTo.width, scaleTo.height, Image.SCALE_SMOOTH);

			int xs = area.width - scaleTo.width;
			int ys = area.height - scaleTo.height;
			g2d.drawImage(simg, area.x + xs / 2, area.y + ys / 2, null);
		}
	}

	void drawBackground(Rectangle area, Graphics2D g2d) {
		g2d.setColor(Color.WHITE);
		g2d.fill(area);
	}

	public void visualizeOnDrawing(Graphics g, Graphics2D g2d) {
		if(g instanceof GeneProduct)
		{
			if(useSamples.size() == 0) return; //Nothing to draw
			GeneProduct gp = (GeneProduct) g;
			Shape da = getVisualization().provideDrawArea(this, g);
			Rectangle area = da.getBounds();
			drawArea(gp, area, g2d);
		}
	}

	/** Splits the draw area into one vertical strip per configured sample and draws each. */
	void drawArea(final GeneProduct gp, Rectangle area, Graphics2D g2d) {
		int nr = useSamples.size();
		java.awt.Shape origClip = g2d.getClip();
		g2d.clip(gp.getShape());
		double xf = area.x;
		double wf = (double)area.width / nr;
		for(int i = 0; i < nr; i++)
		{
			// NOTE(review): the third Rectangle argument is a width, but (xf + wf) is the
			// strip's right edge. The overdraw is masked by the clip and by later strips
			// painting over it, so this is kept as-is to preserve the rendered output --
			// confirm before changing to (int)Math.ceil(wf).
			Rectangle r = new Rectangle(
					(int)xf, area.y,
					(int)(xf + wf), area.height);
			xf += wf;
			ConfiguredSample s = useSamples.get(i);
			Xref idc = new Xref(gp.getPathwayElement().getElementID(), gp.getPathwayElement().getDataSource());
			CachedData cache = gexManager.getCachedData();
			if(cache == null) continue;

			if(s.getColorSet() == null) {
				Logger.log.trace("No colorset for sample " + s);
				continue; //No ColorSet for this sample
			}
			if(cache.hasData(idc))
			{
				List<? extends IRow> data = cache.getData(idc);
				if (data.size() > 0)
				{
					drawSample(s, data, r, g2d);
				}
				else
				{
					drawNoDataFound(s, area, g2d);
				}
			}
			else
			{
				// data not in the cache yet: show the stripe pattern and request an
				// async fetch; the node repaints itself once the data arrives
				drawWaitingForData(area, g2d);
				cache.asyncGet(idc, new Callback()
				{
					public void callback()
					{
						gp.markDirty();
					}
				});
			}
		}
		g2d.setClip(origClip);
		g2d.setColor(Color.BLACK);
		g2d.draw (gp.getShape());
	}

	void drawNoDataFound(ConfiguredSample s, Rectangle area, Graphics2D g2d) {
		ColorSet cs = s.getColorSet();
		drawColoredRectangle(area, cs.getColor(ColorSet.ID_COLOR_NO_DATA_FOUND), g2d);
	}

	void drawWaitingForData (Rectangle r, Graphics2D g2d)
	{
		g2d.setPaint(STRIPE_PATTERN);
		g2d.fill(r);
		if(drawLine) {
			g2d.setColor(getLineColor());
			g2d.draw(r);
		}
	}

	void drawColoredRectangle(Rectangle r, Color c, Graphics2D g2d) {
		g2d.setColor(c);
		g2d.fill(r);
		if(drawLine) {
			g2d.setColor(getLineColor());
			g2d.draw(r);
		}
	}

	Color lineColor;
	boolean drawLine = false;

	/** Draws one rectangle colored by the summary (average) of all data rows. */
	void drawSampleAvg(ConfiguredSample s, List<? extends IRow> data, Rectangle area, Graphics2D g2d) {
		ColorSet cs = s.getColorSet();
		Color rgb = cs.getColor(ReporterData.createListSummary(data), s.getSample());
		drawColoredRectangle(area, rgb, g2d);
	}

	/** Draws one horizontal bar per data row, stacked vertically within area. */
	void drawSampleBar(ConfiguredSample s, List<? extends IRow> refdata, Rectangle area, Graphics2D g2d) {
		ColorSet cs = s.getColorSet();
		int n = refdata.size();
		double hf = (double)area.height / n;
		double yf = area.y;
		for(int i = 0; i < n; i++) {
			Color rgb = cs.getColor(refdata.get(i), s.getSample());
			// NOTE(review): (yf + hf) is the bar's bottom edge rather than its height;
			// kept as-is for the same reason as in drawArea -- confirm before changing.
			Rectangle r = new Rectangle(
					area.x, (int)yf,
					area.width, (int)(yf + hf));
			yf += hf;
			drawColoredRectangle(r, rgb, g2d);
		}
	}

	void setLineColor(Color rgb) {
		if(rgb != null) {
			lineColor = rgb;
			modified();
		}
	}

	Color getLineColor() { return lineColor == null ? LINE_COLOR_DEFAULT : lineColor; }

	void setDrawLine(boolean draw) {
		drawLine = draw;
		modified();
	}

	/**
	 * Draws one sample strip: an image if the sample has one configured, otherwise a
	 * colored rectangle (averaged or split into bars when the data is ambiguous).
	 */
	void drawSample(ConfiguredSample s, List<? extends IRow> data, Rectangle area, Graphics2D g2d) {
		ColorSet cs = s.getColorSet();
		if(s.hasImage()) {
			Color rgb = cs.getColor(ReporterData.createListSummary(data), s.getSample());
			drawImage(s, rgb, area, g2d);
		} else {
			if(data.size() > 1) {
				switch(s.getAmbigiousType()) {
				case ConfiguredSample.AMBIGIOUS_AVG:
					drawSampleAvg(s, data, area, g2d);
					break;
				case ConfiguredSample.AMBIGIOUS_BARS:
					drawSampleBar(s, data, area, g2d);
					break;
				}
			} else {
				Color rgb = cs.getColor(data.get(0), s.getSample());
				drawColoredRectangle(area, rgb, g2d);
			}
		}
	}

	void setUseSamples(List<ConfiguredSample> samples)
	{
		useSamples = samples;
	}

	/**
	 * Add a sample to use for visualization
	 * @param s The sample to add
	 */
	public void addUseSample(ISample s) {
		if(s != null) {
			// The previous duplicate check (useSamples.contains(s)) compared a list of
			// ConfiguredSample against an ISample and could never match; look up by the
			// wrapped sample instead so the same sample is not added twice.
			if(getConfiguredSample(s) == null) useSamples.add(new ConfiguredSample(s));
			modified();
		}
	}

	/**
	 * Remove a sample from the samples that will be used for visualization
	 * @param s
	 */
	void removeUseSample(ConfiguredSample s) {
		if(s != null) {
			useSamples.remove(s);
			modified();
		}
	}

	public final Element toXML() {
		Element xml = super.toXML();
		saveAttributes(xml);
		for(ConfiguredSample s : useSamples)
		{
			// samples without a colorset cannot be restored, so they are not saved
			if (s.getColorSet() != null) xml.addContent(s.toXML());
		}
		return xml;
	}

	final static String XML_ELM_URL = "image";
	static final String XML_ATTR_DRAWLINE = "drawLine";
	static final String XML_ELM_LINECOLOR = "lineColor";

	/**
	 * Loads additional configuration of this plug-in from the XML element
	 * (image URLs, line color and drawLine flag).
	 * @param xml The XML element to load the attributes from
	 */
	void loadAttributes(Element xml) {
		for(Object o : xml.getChildren(XML_ELM_URL)) {
			try {
				URL url = new URL(((Element)o).getText());
				addImageURL(url);
			} catch(Exception e) {
				Logger.log.error("couldn't load image URL for plugin", e);
			}
		}
		try {
			lineColor = ColorConverter.parseColorElement(xml.getChild(XML_ELM_LINECOLOR));
			drawLine = Boolean.parseBoolean(xml.getAttributeValue(XML_ATTR_DRAWLINE));
		} catch(Exception e) {
			Logger.log.error("Unable to parse settings for plugin", e);
		}
	}

	/**
	 * Saves additional configuration of this plug-in to the XML element
	 * (non-default image URLs, line color and drawLine flag).
	 * @param xml The XML element to save the attributes to
	 */
	void saveAttributes(Element xml) {
		for(URL url : getImageURLs()) {
			if(defaultURLs().contains(url)) continue; //Skip default urls
			Element elm = new Element(XML_ELM_URL);
			elm.setText(url.toString());
			xml.addContent(elm);
		}
		xml.setAttribute(XML_ATTR_DRAWLINE, Boolean.toString(drawLine));
		xml.addContent(ColorConverter.createColorElement(XML_ELM_LINECOLOR, getLineColor()));
	}

	public final void loadXML(Element xml) {
		super.loadXML(xml);
		loadAttributes(xml);
		for(Object o : xml.getChildren(ConfiguredSample.XML_ELEMENT)) {
			try {
				useSamples.add(new ConfiguredSample((Element)o));
			} catch(VisualizationException e) {
				Logger.log.error("Unable to load plugin settings", e);
			}
		}
	}

	/**
	 * This class stores the configuration for a sample that is selected for
	 * visualization. In this implementation, a color-set to use for visualization is stored.
	 * Extend this class to store additional configuration data.
	 */
	public class ConfiguredSample {
		public static final int AMBIGIOUS_AVG = 0;
		public static final int AMBIGIOUS_BARS = 1;

		ColorSet colorSet = null;
		// how to visualize when a node maps to multiple data rows
		int ambigious = AMBIGIOUS_BARS;

		BufferedImage cacheImage;
		URL imageURL;
		Color replaceColor = DEFAULT_TRANSPARENT;
		int tolerance; //range 0 - 255;

		private ISample sample;

		int getAmbigiousType() { return ambigious; }

		void setAmbigiousType(int type) {
			ambigious = type;
			modified();
		}

		public ISample getSample()
		{
			return sample;
		}

		public int getId() {
			return sample.getId();
		}

		final static String XML_ATTR_ASPECT = "maintain-aspect-ratio";
		final static String XML_ATTR_TOLERANCE = "tolerance";
		final static String XML_ATTR_IMAGE = "image-url";
		final static String XML_ATTR_REPLACE = "replace-color";
		final static String XML_ATTR_AMBIGIOUS = "ambigious";

		protected void saveAttributes(Element xml) {
			xml.setAttribute(XML_ATTR_AMBIGIOUS, Integer.toString(ambigious));
			// Image-related attributes are only meaningful (and only read back by
			// loadAttributes) when an image is configured. The previous code also set
			// XML_ATTR_ASPECT unconditionally, which duplicated the write below.
			if(imageURL != null) {
				xml.setAttribute(XML_ATTR_ASPECT, Boolean.toString(getMaintainAspect()));
				xml.setAttribute(XML_ATTR_TOLERANCE, Integer.toString(getTolerance()));
				xml.setAttribute(XML_ATTR_IMAGE, imageURL.toString());
				xml.addContent(ColorConverter.createColorElement(XML_ATTR_REPLACE, getReplaceColor()));
			}
		}

		protected void loadAttributes(Element xml) {
			int amb = Integer.parseInt(xml.getAttributeValue(XML_ATTR_AMBIGIOUS));
			setAmbigiousType(amb);
			try {
				if(xml.getAttributeValue(XML_ATTR_IMAGE) != null) {
					setMaintainAspect(Boolean.parseBoolean(xml.getAttributeValue(XML_ATTR_ASPECT)));
					setTolerance(Integer.parseInt(xml.getAttributeValue(XML_ATTR_TOLERANCE)));
					setURL(new URL(xml.getAttributeValue(XML_ATTR_IMAGE)));
					setReplaceColor(ColorConverter.parseColorElement(xml.getChild(XML_ATTR_REPLACE)));
				}
			} catch(Exception e) {
				Logger.log.error("Unable to load plugin", e);
			}
		}

		static final String XML_ELEMENT = "sample";
		static final String XML_ATTR_ID = "id";
		static final String XML_ATTR_COLORSET = "colorset";

		private final Element toXML() {
			Element xml = new Element(XML_ELEMENT);
			xml.setAttribute(XML_ATTR_ID, Integer.toString(sample.getId()));
			xml.setAttribute(XML_ATTR_COLORSET, colorSet.getName());
			saveAttributes(xml);
			return xml;
		}

		private final void loadXML(Element xml) throws VisualizationException
		{
			int id = Integer.parseInt(xml.getAttributeValue(XML_ATTR_ID));
			String csn = xml.getAttributeValue(XML_ATTR_COLORSET);
			try
			{
				sample = gexManager.getCurrentGex().getSamples().get(id);
			}
			catch (DataException ex)
			{
				throw new VisualizationException(ex);
			}
			if (sample == null)
			{
				throw new VisualizationException("Couldn't find Sample with id " + id);
			}

			setColorSet(getVisualization().getManager().getColorSetManager().getColorSet(csn));
			loadAttributes(xml);
		}

		/**
		 * Create a configured sample based on an existing sample
		 * @param s The sample to base the configured sample on
		 */
		public ConfiguredSample(ISample s) {
			if (s == null) throw new NullPointerException();
			sample = s;
		}

		protected ColorByExpression getMethod() {
			return ColorByExpression.this;
		}

		/**
		 * Create a configured sample from the information in the given XML element
		 * @param xml The XML element containing information to create the configured sample from
		 * @throws VisualizationException
		 */
		public ConfiguredSample(Element xml) throws VisualizationException {
			loadXML(xml);
		}

		/**
		 * Set the color-set to use for visualization of this sample
		 */
		public void setColorSet(ColorSet cs) {
			colorSet = cs;
			modified();
		}

		/**
		 * Get the color-set to use for visualization of this sample
		 * @return the color-set
		 */
		protected ColorSet getColorSet() {
			return colorSet;
		}

		/**
		 * Get the name of the color-sets that is selected for visualization
		 * @return The name of the selected color-set, or "no colorsets available", if no
		 * color-sets exist
		 */
		protected String getColorSetName() {
			ColorSet cs = getColorSet();
			return cs == null ? "no colorsets available" : cs.getName();
		}

		boolean aspectRatio = true;

		public void setURL(URL url) {
			imageURL = url;
			invalidateImageCache();
			modified();
		}

		public void setDefaultURL() {
			setURL(defaultURLs().get(0));
		}

		public URL getURL() {
			return imageURL;
		}

		public boolean hasImage() {
			return imageURL != null;
		}

		public void setReplaceColor(Color rgb) {
			if(rgb != null) replaceColor = rgb;
			invalidateImageCache();
			modified();
		}

		public Color getReplaceColor() { return replaceColor; }

		public void setMaintainAspect(boolean maintain) {
			aspectRatio = maintain;
			invalidateImageCache();
			modified();
		}

		public boolean getMaintainAspect() { return aspectRatio;}

		public void setTolerance(int tol) {
			tolerance = tol;
			invalidateImageCache();
			modified();
		}

		public int getTolerance() { return tolerance; }

		public BufferedImage getImage() {
			if(imageURL == null) return null;
			if(cacheImage == null) {
				try {
					cacheImage = ImageIO.read(imageURL);
				} catch(IOException e) {
					Logger.log.error("Unable to load image from " + imageURL, e);
					//TODO: better exception handling
					return null;
				}
			}
			// return a copy so callers cannot mutate the cached image
			return cacheImage.getSubimage(0, 0, cacheImage.getWidth(), cacheImage.getHeight());
		}

		private void invalidateImageCache() {
			scaledImages.clear();
			coloredImages.clear();
			cacheImage = null;
		}

		private Map<Dimension, Image> scaledImages = new HashMap<Dimension, Image>();
		private Map<Color, Image> coloredImages = new HashMap<Color, Image>();

		public Image getImage(Dimension size) {
			return getImage(size, null);
		}

		public Image getImage(Color replaceWith) {
			Image img = coloredImages.get(replaceWith);
			if(img == null) {
				img = getImage();
				if(img == null) return null;
				if(replaceWith != null) img = doReplaceColor(img, replaceColor, replaceWith, tolerance);
				coloredImages.put(replaceWith, img);
			}
			return img;
		}

		public Image getImage(Dimension size, Color replaceWith) {
			Image img = scaledImages.get(size);
			if(img == null) {
				img = getImage(replaceWith);
				if(img == null) return null;

				size = getScaleSize(size);
				img = img.getScaledInstance(size.width, size.height, Image.SCALE_SMOOTH);
				scaledImages.put(size, img);
			}
			return img;
		}

		/**
		 * Computes the size to scale the image to. When aspect ratio is maintained the
		 * smaller target dimension is kept and the other is derived from the image ratio.
		 * Note: mutates and returns the passed-in Dimension.
		 */
		public Dimension getScaleSize(Dimension target) {
			if(aspectRatio) {
				BufferedImage img = getImage();
				double r = (double)img.getHeight() / img.getWidth();
				int min = (int)Math.min(target.getWidth(), target.getHeight());
				if(min == target.getWidth()) target.height = (int)(min * r);
				else target.width = (int)(min * r);
			}
			return target;
		}

		/** Replaces every pixel within tolerance of oldColor by newColor. */
		Image doReplaceColor(Image img, final Color oldColor, final Color newColor, final int tol) {
			RGBImageFilter f = new RGBImageFilter() {
				public int filterRGB(int x, int y, int rgb) {
					Color thisColor = new Color(rgb);
					if(compareColor(oldColor, thisColor, tol)) {
						return newColor.getRGB();
					}
					return rgb;
				}
			};
			ImageProducer pr = new FilteredImageSource(img.getSource(), f);
			return Toolkit.getDefaultToolkit().createImage(pr);
		}

		/** @return true when each RGB channel of rgb2 is within tolerance of rgb1 */
		boolean compareColor(Color rgb1, Color rgb2, int tolerance) {
			return rgb2.getRed() >= rgb1.getRed() - tolerance &&
					rgb2.getRed() <= rgb1.getRed() + tolerance &&
					rgb2.getGreen() >= rgb1.getGreen() - tolerance &&
					rgb2.getGreen() <= rgb1.getGreen() + tolerance &&
					rgb2.getBlue() >= rgb1.getBlue() - tolerance &&
					rgb2.getBlue() <= rgb1.getBlue() + tolerance;
		}
	}

	@Override
	public int defaultDrawingOrder()
	{
		return -1;
	}
}
| |
/*
* Copyright 2010-2013 Ning, Inc.
* Copyright 2014-2018 Groupon, Inc
* Copyright 2014-2018 The Billing Project, LLC
*
* The Billing Project licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.junction.plumbing.billing;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.SortedSet;
import java.util.UUID;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.LocalDate;
import org.killbill.billing.ObjectType;
import org.killbill.billing.account.api.Account;
import org.killbill.billing.account.api.AccountApiException;
import org.killbill.billing.callcontext.InternalTenantContext;
import org.killbill.billing.catalog.MockCatalog;
import org.killbill.billing.catalog.api.BillingAlignment;
import org.killbill.billing.catalog.api.CatalogApiException;
import org.killbill.billing.catalog.api.Currency;
import org.killbill.billing.catalog.api.InternationalPrice;
import org.killbill.billing.catalog.api.Plan;
import org.killbill.billing.catalog.api.PlanPhase;
import org.killbill.billing.catalog.api.VersionedCatalog;
import org.killbill.billing.entitlement.api.BlockingState;
import org.killbill.billing.entitlement.api.BlockingStateType;
import org.killbill.billing.entitlement.api.Entitlement.EntitlementState;
import org.killbill.billing.entitlement.dao.MockBlockingStateDao;
import org.killbill.billing.events.EffectiveSubscriptionInternalEvent;
import org.killbill.billing.invoice.api.DryRunArguments;
import org.killbill.billing.junction.BillingEvent;
import org.killbill.billing.junction.BillingEventSet;
import org.killbill.billing.junction.DefaultBlockingState;
import org.killbill.billing.junction.JunctionTestSuiteNoDB;
import org.killbill.billing.mock.MockEffectiveSubscriptionEvent;
import org.killbill.billing.mock.MockSubscription;
import org.killbill.billing.subscription.api.SubscriptionBase;
import org.killbill.billing.subscription.api.SubscriptionBaseTransitionType;
import org.killbill.billing.subscription.api.user.SubscriptionBillingEvent;
import org.killbill.billing.subscription.api.user.DefaultSubscriptionBillingEvent;
import org.killbill.billing.subscription.api.user.SubscriptionBaseApiException;
import org.killbill.billing.subscription.api.user.SubscriptionBaseBundle;
import org.killbill.billing.util.api.TagApiException;
import org.killbill.billing.util.catalog.CatalogDateHelper;
import org.killbill.billing.util.tag.ControlTagType;
import org.killbill.billing.util.tag.dao.MockTagDao;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
public class TestBillingApi extends JunctionTestSuiteNoDB {

    private static final String DISABLED_BUNDLE = "disabled-bundle";
    private static final String CLEAR_BUNDLE = "clear-bundle";

    // Deterministic ids so assertions can reference them directly.
    private static final UUID eventId = new UUID(0L, 0L);
    private static final UUID subId = new UUID(1L, 0L);
    private static final UUID bunId = new UUID(2L, 0L);
    private static final String bunKey = bunId.toString();

    // Mutable fixtures, re-created for every test by beforeMethod().
    private List<SubscriptionBillingEvent> billingTransitions;
    private List<EffectiveSubscriptionInternalEvent> effectiveSubscriptionTransitions;
    private SubscriptionBase subscription;
    private MockCatalog catalog;

    /**
     * Wires the mocked subscription/catalog APIs so that every test sees a single
     * bundle with a single subscription, and resets the mock DAOs.
     */
    @BeforeMethod(groups = "fast")
    public void beforeMethod() throws Exception {
        if (hasFailed()) {
            return;
        }
        super.beforeMethod();
        final SubscriptionBaseBundle bundle = Mockito.mock(SubscriptionBaseBundle.class);
        Mockito.when(bundle.getId()).thenReturn(bunId);
        final List<SubscriptionBaseBundle> bundles = ImmutableList.<SubscriptionBaseBundle>of(bundle);
        billingTransitions = new LinkedList<SubscriptionBillingEvent>();
        effectiveSubscriptionTransitions = new LinkedList<EffectiveSubscriptionInternalEvent>();
        final DateTime subscriptionStartDate = clock.getUTCNow().minusDays(3);
        subscription = new MockSubscription(subId, bunId, null, null, subscriptionStartDate, subscriptionStartDate);
        final List<SubscriptionBase> subscriptions = ImmutableList.<SubscriptionBase>of(subscription);
        Mockito.when(subscriptionInternalApi.getBundlesForAccount(Mockito.<UUID>any(), Mockito.<InternalTenantContext>any())).thenReturn(bundles);
        Mockito.when(subscriptionInternalApi.getSubscriptionsForBundle(Mockito.<UUID>any(), Mockito.<DryRunArguments>any(), Mockito.<InternalTenantContext>any())).thenReturn(subscriptions);
        Mockito.when(subscriptionInternalApi.getSubscriptionsForAccount(Mockito.<VersionedCatalog>any(), Mockito.<LocalDate>any(), Mockito.<InternalTenantContext>any())).thenReturn(ImmutableMap.<UUID, List<SubscriptionBase>>builder()
                                                                                                                                                                                           .put(bunId, subscriptions)
                                                                                                                                                                                           .build());
        Mockito.when(subscriptionInternalApi.getSubscriptionFromId(Mockito.<UUID>any(), Mockito.<InternalTenantContext>any())).thenReturn(subscription);
        Mockito.when(subscriptionInternalApi.getBundleFromId(Mockito.<UUID>any(), Mockito.<InternalTenantContext>any())).thenReturn(bundle);
        Mockito.when(subscriptionInternalApi.getBaseSubscription(Mockito.<UUID>any(), Mockito.<InternalTenantContext>any())).thenReturn(subscription);
        Mockito.when(subscriptionInternalApi.getSubscriptionBillingEvents(Mockito.<VersionedCatalog>any(), Mockito.<SubscriptionBase>any(), Mockito.<InternalTenantContext>any())).thenReturn(billingTransitions);
        Mockito.when(subscriptionInternalApi.getAllTransitions(Mockito.<SubscriptionBase>any(), Mockito.<InternalTenantContext>any())).thenReturn(effectiveSubscriptionTransitions);
        final VersionedCatalog versionedCatalog = catalogService.getFullCatalog(true, true, internalCallContext);
        // Tests mutate the latest catalog version directly (e.g. findPlan).
        catalog = (MockCatalog) Iterables.getLast(versionedCatalog.getVersions());
        Mockito.when(catalogService.getFullCatalog(true, true, internalCallContext)).thenReturn(versionedCatalog);
        // Set a default alignment
        ((MockSubscription) subscription).setBillingAlignment(BillingAlignment.ACCOUNT);
        // Cleanup mock daos
        ((MockBlockingStateDao) blockingStateDao).clear();
        ((MockTagDao) tagDao).clear();
    }

    @Test(groups = "fast")
    public void testBillingEventsEmpty() throws AccountApiException, CatalogApiException, SubscriptionBaseApiException {
        final SortedSet<BillingEvent> events = billingInternalApi.getBillingEventsForAccountAndUpdateAccountBCD(new UUID(0L, 0L), null, null, internalCallContext);
        Assert.assertEquals(events.size(), 0);
    }

    @Test(groups = "fast")
    public void testBillingEventsNoBillingPeriod() throws CatalogApiException, AccountApiException, SubscriptionBaseApiException {
        final Plan nextPlan = catalog.findPlan("3-PickupTrialEvergreen10USD");
        // The trial has no billing period
        final PlanPhase nextPhase = nextPlan.getAllPhases()[0];
        final DateTime now = createSubscriptionCreationEvent(nextPlan, nextPhase);
        final Account account = createAccount(10);
        final SortedSet<BillingEvent> events = billingInternalApi.getBillingEventsForAccountAndUpdateAccountBCD(account.getId(), null, null, internalCallContext);
        checkFirstEvent(events, nextPlan, account.getBillCycleDayLocal(), subId, now, nextPhase, SubscriptionBaseTransitionType.CREATE.toString());
    }

    @Test(groups = "fast")
    public void testBillingEventsSubscriptionAligned() throws CatalogApiException, AccountApiException, SubscriptionBaseApiException {
        final Plan nextPlan = catalog.findPlan("3-PickupTrialEvergreen10USD");
        final PlanPhase nextPhase = nextPlan.getAllPhases()[1];
        final DateTime now = createSubscriptionCreationEvent(nextPlan, nextPhase);
        final Account account = createAccount(1);
        ((MockSubscription) subscription).setBillingAlignment(BillingAlignment.SUBSCRIPTION);
        final SortedSet<BillingEvent> events = billingInternalApi.getBillingEventsForAccountAndUpdateAccountBCD(account.getId(), null, null, internalCallContext);
        // The expected BCD is when the subscription started since we skip the trial phase
        checkFirstEvent(events, nextPlan, subscription.getStartDate().getDayOfMonth(), subId, now, nextPhase, SubscriptionBaseTransitionType.CREATE.toString());
    }

    @Test(groups = "fast")
    public void testBillingEventsAccountAligned() throws CatalogApiException, AccountApiException, SubscriptionBaseApiException {
        final Plan nextPlan = catalog.findPlan("3-PickupTrialEvergreen10USD");
        final PlanPhase nextPhase = nextPlan.getAllPhases()[1];
        final DateTime now = createSubscriptionCreationEvent(nextPlan, nextPhase);
        final Account account = createAccount(32);
        final SortedSet<BillingEvent> events = billingInternalApi.getBillingEventsForAccountAndUpdateAccountBCD(account.getId(), null, null, internalCallContext);
        // The expected BCD is the account BCD (account aligned by default)
        checkFirstEvent(events, nextPlan, 32, subId, now, nextPhase, SubscriptionBaseTransitionType.CREATE.toString());
    }

    @Test(groups = "fast")
    public void testBillingEventsBundleAligned() throws CatalogApiException, AccountApiException, SubscriptionBaseApiException {
        final Plan nextPlan = catalog.findPlan("7-Horn1USD");
        final PlanPhase nextPhase = nextPlan.getAllPhases()[0];
        final DateTime now = createSubscriptionCreationEvent(nextPlan, nextPhase);
        final Account account = createAccount(1);
        ((MockSubscription) subscription).setPlan(catalog.findPlan("3-PickupTrialEvergreen10USD"));
        ((MockSubscription) subscription).setBillingAlignment(BillingAlignment.BUNDLE);
        final SortedSet<BillingEvent> events = billingInternalApi.getBillingEventsForAccountAndUpdateAccountBCD(account.getId(), null, null, internalCallContext);
        // The expected BCD is when the subscription started
        checkFirstEvent(events, nextPlan, subscription.getStartDate().getDayOfMonth(), subId, now, nextPhase, SubscriptionBaseTransitionType.CREATE.toString());
    }

    @Test(groups = "fast")
    public void testBillingEventsWithBlock() throws CatalogApiException, AccountApiException, SubscriptionBaseApiException {
        final Plan nextPlan = catalog.findPlan("3-PickupTrialEvergreen10USD");
        final PlanPhase nextPhase = nextPlan.getAllPhases()[1];
        final DateTime now = createSubscriptionCreationEvent(nextPlan, nextPhase);
        final Account account = createAccount(32);
        // Disable billing one day after creation, re-enable the day after that:
        // we expect CREATE, START_BILLING_DISABLED and END_BILLING_DISABLED events.
        final BlockingState blockingState1 = new DefaultBlockingState(bunId, BlockingStateType.SUBSCRIPTION_BUNDLE, DISABLED_BUNDLE, "test", true, true, true, now.plusDays(1));
        final BlockingState blockingState2 = new DefaultBlockingState(bunId, BlockingStateType.SUBSCRIPTION_BUNDLE, CLEAR_BUNDLE, "test", false, false, false, now.plusDays(2));
        blockingStateDao.setBlockingStatesAndPostBlockingTransitionEvent(ImmutableMap.<BlockingState, Optional<UUID>>of(blockingState1, Optional.<UUID>absent(), blockingState2, Optional.<UUID>absent()), internalCallContext);
        final SortedSet<BillingEvent> events = billingInternalApi.getBillingEventsForAccountAndUpdateAccountBCD(account.getId(), null, null, internalCallContext);
        Assert.assertEquals(events.size(), 3);
        final Iterator<BillingEvent> it = events.iterator();
        checkEvent(it.next(), nextPlan, account.getBillCycleDayLocal(), subId, now, nextPhase, SubscriptionBaseTransitionType.CREATE.toString(), nextPhase.getFixed().getPrice(), nextPhase.getRecurring().getRecurringPrice());
        checkEvent(it.next(), nextPlan, account.getBillCycleDayLocal(), subId, now.plusDays(1), nextPhase, SubscriptionBaseTransitionType.START_BILLING_DISABLED.toString(), null, null);
        checkEvent(it.next(), nextPlan, account.getBillCycleDayLocal(), subId, now.plusDays(2), nextPhase, SubscriptionBaseTransitionType.END_BILLING_DISABLED.toString(), nextPhase.getFixed().getPrice(), nextPhase.getRecurring().getRecurringPrice());
    }

    @Test(groups = "fast")
    public void testBillingEventsAutoInvoicingOffAccount() throws CatalogApiException, AccountApiException, TagApiException, SubscriptionBaseApiException {
        final Plan nextPlan = catalog.findPlan("3-PickupTrialEvergreen10USD");
        final PlanPhase nextPhase = nextPlan.getAllPhases()[1];
        createSubscriptionCreationEvent(nextPlan, nextPhase);
        final Account account = createAccount(32);
        tagInternalApi.addTag(account.getId(), ObjectType.ACCOUNT, ControlTagType.AUTO_INVOICING_OFF.getId(), internalCallContext);
        final BillingEventSet events = billingInternalApi.getBillingEventsForAccountAndUpdateAccountBCD(account.getId(), null, null, internalCallContext);
        // Events are still returned but the account-level flag is set.
        assertEquals(events.isAccountAutoInvoiceOff(), true);
        assertEquals(events.size(), 1);
    }

    @Test(groups = "fast")
    public void testBillingEventsAutoInvoicingOffBundle() throws CatalogApiException, AccountApiException, TagApiException, SubscriptionBaseApiException {
        final Plan nextPlan = catalog.findPlan("3-PickupTrialEvergreen10USD");
        final PlanPhase nextPhase = nextPlan.getAllPhases()[1];
        createSubscriptionCreationEvent(nextPlan, nextPhase);
        final Account account = createAccount(32);
        tagInternalApi.addTag(bunId, ObjectType.BUNDLE, ControlTagType.AUTO_INVOICING_OFF.getId(), internalCallContext);
        final BillingEventSet events = billingInternalApi.getBillingEventsForAccountAndUpdateAccountBCD(account.getId(), null, null, internalCallContext);
        // Bundle-level auto-invoicing-off removes the subscription's events entirely.
        assertEquals(events.getSubscriptionIdsWithAutoInvoiceOff().size(), 1);
        assertEquals(events.getSubscriptionIdsWithAutoInvoiceOff().get(0), subId);
        assertEquals(events.size(), 0);
    }

    /**
     * Asserts the set contains exactly one event and delegates to checkEvent
     * with the phase's fixed and recurring prices as the expected prices.
     */
    private void checkFirstEvent(final SortedSet<BillingEvent> events, final Plan nextPlan,
                                 final int bcd, final UUID id, final DateTime time, final PlanPhase nextPhase, final String desc) throws CatalogApiException {
        Assert.assertEquals(events.size(), 1);
        checkEvent(events.first(), nextPlan, bcd, id, time, nextPhase, desc, nextPhase.getFixed().getPrice(), nextPhase.getRecurring().getRecurringPrice());
    }

    /**
     * Asserts one billing event against expected plan, phase, prices, BCD,
     * subscription id, effective day-of-month and transition type.
     * TestNG's assertEquals signature is (actual, expected) -- arguments are
     * ordered accordingly so failure messages read correctly.
     */
    private void checkEvent(final BillingEvent event, final Plan nextPlan, final int bcd, final UUID id, final DateTime time,
                            final PlanPhase nextPhase, final String desc, final InternationalPrice fixedPrice, final InternationalPrice recurringPrice) throws CatalogApiException {
        if (fixedPrice != null) {
            Assert.assertEquals(event.getFixedPrice(), fixedPrice.getPrice(Currency.USD));
        } else {
            assertNull(event.getFixedPrice());
        }
        if (recurringPrice != null) {
            Assert.assertEquals(event.getRecurringPrice(), recurringPrice.getPrice(Currency.USD));
        } else {
            assertNull(event.getRecurringPrice());
        }
        Assert.assertEquals(event.getBillCycleDayLocal(), bcd);
        Assert.assertEquals(event.getSubscriptionId(), id);
        Assert.assertEquals(event.getEffectiveDate().getDayOfMonth(), time.getDayOfMonth());
        Assert.assertEquals(event.getPlanPhase(), nextPhase);
        Assert.assertEquals(event.getPlan(), nextPlan);
        // Billing-disabled start events carry no billing period to compare.
        if (!SubscriptionBaseTransitionType.START_BILLING_DISABLED.equals(event.getTransitionType())) {
            Assert.assertEquals(event.getBillingPeriod(), nextPhase.getRecurring().getBillingPeriod());
        }
        Assert.assertEquals(event.getTransitionType().toString(), desc);
    }

    /**
     * Mocks a USD/UTC account with the given bill-cycle day and wires the
     * account APIs to return it for any id.
     */
    private Account createAccount(final int billCycleDay) throws AccountApiException {
        final Account account = Mockito.mock(Account.class);
        Mockito.when(account.getBillCycleDayLocal()).thenReturn(billCycleDay);
        Mockito.when(account.getCurrency()).thenReturn(Currency.USD);
        Mockito.when(account.getId()).thenReturn(UUID.randomUUID());
        Mockito.when(account.getTimeZone()).thenReturn(DateTimeZone.UTC);
        Mockito.when(accountInternalApi.getAccountById(Mockito.<UUID>any(), Mockito.<InternalTenantContext>any())).thenReturn(account);
        Mockito.when(accountInternalApi.getImmutableAccountDataById(Mockito.<UUID>any(), Mockito.<InternalTenantContext>any())).thenReturn(account);
        Mockito.when(accountInternalApi.getBCD(Mockito.<InternalTenantContext>any())).thenReturn(billCycleDay);
        return account;
    }

    /**
     * Registers a CREATE transition (effective yesterday, requested now) for the
     * given plan/phase in both transition lists and returns "now".
     */
    private DateTime createSubscriptionCreationEvent(final Plan nextPlan, final PlanPhase nextPhase) throws CatalogApiException {
        final DateTime now = clock.getUTCNow();
        final DateTime then = now.minusDays(1);
        final EffectiveSubscriptionInternalEvent t = new MockEffectiveSubscriptionEvent(
                eventId, subId, bunId, bunKey, then, now, null, null, null, null, null, EntitlementState.ACTIVE,
                nextPlan.getName(), nextPhase.getName(),
                nextPlan.getPriceList().getName(), null, 1L,
                SubscriptionBaseTransitionType.CREATE, 1, null, 1L, 2L, null);
        effectiveSubscriptionTransitions.add(t);
        billingTransitions.add(new DefaultSubscriptionBillingEvent(SubscriptionBaseTransitionType.CREATE, nextPlan, nextPhase, now, 1L, null,
                                                                   CatalogDateHelper.toUTCDateTime(nextPlan.getCatalog().getEffectiveDate())));
        return now;
    }
}
| |
/**
* Copyright (C) 2004-2011 Jive Software. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.sparkimpl.plugin.privacy.ui;
import java.awt.FlowLayout;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.util.Collection;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.BorderFactory;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.JScrollPane;
import javax.swing.JTree;
import javax.swing.SwingConstants;
import javax.swing.SwingUtilities;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreePath;
import org.jivesoftware.resource.Res;
import org.jivesoftware.resource.SparkRes;
import org.jivesoftware.smackx.privacy.packet.PrivacyItem;
import org.jivesoftware.spark.component.RolloverButton;
import org.jivesoftware.sparkimpl.plugin.privacy.PrivacyManager;
import org.jivesoftware.sparkimpl.plugin.privacy.list.SparkPrivacyList;
import org.jivesoftware.sparkimpl.plugin.privacy.list.SparkPrivacyListListener;
/**
* @author Bergunde Holger
*/
/**
 * Swing panel showing the user's XMPP privacy lists as a tree, plus info about
 * which list is currently active and which is the default. Context menus on
 * the tree allow adding/removing lists, contacts and groups.
 */
public class PrivacyListTree extends JPanel implements SparkPrivacyListListener {

    private static final long serialVersionUID = 1885262127050966627L;

    // Re-created in initializeTree(), hence not final.
    private DefaultTreeModel _model;
    private final JTree _tree;
    private final PrivacyManager _pManager;
    private final PrivacyTreeNode _top = new PrivacyTreeNode(Res.getString("privacy.root.node"));
    // This panel itself; used as the parent component for dialogs.
    private final JComponent _comp;
    private final JPanel treeandInfo = new JPanel(new GridBagLayout());
    // Buttons displaying the active/default list; clicking declines the list.
    private RolloverButton _actList;
    private RolloverButton _defList;

    /**
     * Builds the panel. If the server supports privacy lists, the tree and the
     * active/default-list info are shown; otherwise a "not supported" label.
     */
    public PrivacyListTree() {
        _comp = this;
        _pManager = PrivacyManager.getInstance();
        this.setLayout(new GridBagLayout());
        _model = new DefaultTreeModel(_top);
        _tree = new JTree(_model);
        _tree.setCellRenderer(new PrivacyTreeCellRenderer());
        JScrollPane _scrollPane = new JScrollPane(_tree);
        treeandInfo.setBorder(BorderFactory.createTitledBorder(Res.getString("privacy.title.preferences")));
        this.add(treeandInfo, new GridBagConstraints(0, 1, 1, 1, 1.0, 1.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0));
        createInfoPanel();
        if (_pManager.isPrivacyActive()) {
            treeandInfo.add(_scrollPane, new GridBagConstraints(0, 0, 1, 1, 1.0, 1.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0));
            initializeTree();
            createCurrentListInfoPanel();
        } else {
            JLabel notActive = new JLabel(Res.getString("privacy.label.not.supported"));
            treeandInfo.add(notActive, new GridBagConstraints(1, 0, 1, 1, 1.0, 1.0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(5, 10, 0, 0), 0, 0));
        }
    }

    /**
     * Builds the legend panel explaining the per-item icons (IQ, message,
     * presence-in, presence-out) and registers this panel as a list listener.
     */
    private void createInfoPanel() {
        _pManager.addListListener(this);
        JPanel info = new JPanel(new GridBagLayout());
        JLabel infolabel = new JLabel(Res.getString("privacy.label.information"));
        info.setBorder(BorderFactory.createTitledBorder(Res.getString("privacy.border.information")));
        JLabel iq = new JLabel(Res.getString("privacy.label.iq.desc"), SparkRes.getImageIcon("PRIVACY_QUERY_ALLOW"), SwingConstants.LEFT);
        JLabel msg = new JLabel(Res.getString("privacy.label.msg.desc"), SparkRes.getImageIcon("PRIVACY_MSG_ALLOW"), SwingConstants.LEFT);
        JLabel pin = new JLabel(Res.getString("privacy.label.pin.desc"), SparkRes.getImageIcon("PRIVACY_PIN_ALLOW"), SwingConstants.LEFT);
        JLabel pout = new JLabel(Res.getString("privacy.label.pout.desc"), SparkRes.getImageIcon("PRIVACY_POUT_ALLOW"), SwingConstants.LEFT);
        info.add(infolabel, new GridBagConstraints(0, 0, 4, 1, 1.0, 1.0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(0, 10, 10, 0), 0, 0));
        info.add(iq, new GridBagConstraints(0, 1, 1, 1, 0.0, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(0, 5, 0, 0), 0, 0));
        info.add(msg, new GridBagConstraints(1, 1, 1, 1, 0.0, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(0, 5, 0, 0), 0, 0));
        info.add(pin, new GridBagConstraints(2, 1, 1, 1, 0.0, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(0, 5, 0, 0), 0, 0));
        info.add(pout, new GridBagConstraints(3, 1, 1, 1, 0.0, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(0, 5, 0, 0), 0, 0));
        this.add(info, new GridBagConstraints(0, 0, 1, 1, 0, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
    }

    /**
     * Builds the panel showing the currently active and default lists as
     * clickable buttons (clicking declines the corresponding list).
     */
    private void createCurrentListInfoPanel() {
        JPanel listInfo = new JPanel(new GridBagLayout());
        _actList = new RolloverButton();
        _actList.addActionListener( e -> _pManager.declineActiveList() );
        _defList = new RolloverButton();
        _defList.addActionListener( e -> _pManager.declineDefaultList() );
        _actList.setHorizontalTextPosition(SwingConstants.LEFT);
        _defList.setHorizontalTextPosition(SwingConstants.LEFT);
        if (_pManager.hasActiveList()) {
            // Fixed: the icon was previously set twice with the identical image.
            _actList.setText(_pManager.getActiveList().getListName());
            _actList.setIcon(SparkRes.getImageIcon("PRIVACY_DEACTIVATE_LIST"));
        } else {
            _actList.setText(Res.getString("privacy.button.no.list.selected"));
            _actList.setEnabled(false);
            _actList.setIcon(null);
        }
        if (_pManager.hasDefaultList()) {
            _defList.setText(_pManager.getDefaultList().getListName());
            _defList.setEnabled(true);
            _defList.setIcon(SparkRes.getImageIcon("PRIVACY_DEACTIVATE_LIST"));
        } else {
            _defList.setText(Res.getString("privacy.button.no.list.selected"));
            _defList.setEnabled(false);
            _defList.setIcon(null);
        }
        JLabel actListLabel = new JLabel(Res.getString("privacy.label.list.is.active"));
        JLabel defListLabel = new JLabel(Res.getString("privacy.label.list.is.default"));
        JPanel actPanel = new JPanel(new FlowLayout());
        actPanel.add(actListLabel);
        actPanel.add(_actList);
        JPanel defPanel = new JPanel(new FlowLayout());
        defPanel.add(defListLabel);
        defPanel.add(_defList);
        listInfo.add(actPanel, new GridBagConstraints(0, 0, 1, 1, 1.0, 1.0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
        listInfo.add(defPanel, new GridBagConstraints(1, 0, 1, 1, 1.0, 1.0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
        treeandInfo.add(listInfo, new GridBagConstraints(0, 1, 1, 1, 0, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
    }

    /**
     * Initialize the jtree for the UI. Sets the model and adds the PrivacyLists
     * to the model using loadPrivacyLists()
     */
    private void initializeTree() {
        _model = new DefaultTreeModel(_top);
        _tree.setModel(_model);
        loadPrivacyLists();
        _tree.expandRow(0);
    }

    /**
     * If selected Node is a Leaf Node and it is a PrivacyItem add remove
     * jmenuitem
     *
     * @param menu
     *            where the method should add the remove option
     * @param node
     *            the node what is selected
     */
    private void addMenuForLeaf(JPopupMenu menu, final PrivacyTreeNode node) {
        JMenuItem remUser;
        // NOTE(review): getSelectionPaths() can return null when nothing is
        // selected -- callers appear to guarantee a selection here; confirm.
        if (_tree.getSelectionPaths().length > 1) {
            remUser = new JMenuItem(Res.getString("privacy.menu.add.rem.items", _tree.getSelectionPaths().length));
        } else {
            remUser = new JMenuItem(Res.getString("privacy.menu.remove") + node.getPrivacyItem().getValue());
        }
        remUser.setIcon(SparkRes.getImageIcon(SparkRes.SMALL_DELETE));
        menu.add(remUser);
        remUser.addActionListener( e -> {
            for (TreePath path : _tree.getSelectionPaths()) {
                PrivacyTreeNode node1 = (PrivacyTreeNode) path.getLastPathComponent();
                // Getting privacy List where we want to remove
                PrivacyTreeNode parent1 = (PrivacyTreeNode) path.getPathComponent(1);
                SparkPrivacyList list = parent1.getPrivacyList();
                // Remove contact or group
                list.removeItem( node1.getPrivacyItem().getValue());
                list.save();
                _model.removeNodeFromParent( node1 );
            }
        } );
    }

    /**
     * If the selected Node is a GroupNode (TreeItems "Contacts" and "Groups"
     * are GroupNodes) add the specified options to the menu
     *
     * @param menu
     *            where the method should add the remove option
     * @param node
     *            the node what is selected
     */
    private void addMenuForGroupNodes(JPopupMenu menu, final PrivacyTreeNode node) {
        String showStringforAdd;
        if (node.isContactGroup()) {
            showStringforAdd = Res.getString("privacy.menu.add.contacts");
        } else {
            showStringforAdd = Res.getString("privacy.menu.add.groups");
        }
        // Path component 1 is the list node, component 2 the Contacts/Groups node.
        PrivacyTreeNode listnode = (PrivacyTreeNode) _tree.getSelectionPath().getPathComponent(1);
        final SparkPrivacyList list = listnode.getPrivacyList();
        final PrivacyTreeNode parent = (PrivacyTreeNode) _tree.getSelectionPath().getPathComponent(2);
        JMenuItem addContact = new JMenuItem(showStringforAdd);
        addContact.setIcon(SparkRes.getImageIcon(SparkRes.SMALL_ADD_IMAGE));
        addContact.addActionListener( e -> {
            PrivacyAddDialogUI browser = new PrivacyAddDialogUI();
            Collection<PrivacyItem> col = browser.showRoster(_comp, !node.isContactGroup());
            for (PrivacyItem pI : col) {
                final PrivacyItem clone = new PrivacyItem( pI.getType(), pI.getValue(), pI.isAllow(), list.getNewItemOrder() );
                list.addItem(clone);
                PrivacyTreeNode newChild = new PrivacyTreeNode(clone);
                _model.insertNodeInto(newChild, parent, 0);
            }
            list.save();
        } );
        menu.add(addContact);
    }

    /**
     * If the selced node is a note that contains the listname of a privacy list
     * add the specified options to the menu
     *
     * @param menu
     *            where the method should add the remove option
     * @param node
     *            the node what is selected
     */
    private void addMenuForListNodes(JPopupMenu menu, final PrivacyTreeNode node) {
        JMenuItem addList = new JMenuItem(Res.getString("privacy.menu.add.list"));
        addList.setIcon(SparkRes.getImageIcon(SparkRes.SMALL_ADD_IMAGE));
        JMenuItem rem = new JMenuItem(Res.getString("privacy.menu.remove.list"));
        JMenuItem act = new JMenuItem(Res.getString("privacy.menu.activate.list"));
        act.setIcon(SparkRes.getImageIcon("PRIVACY_LIGHTNING"));
        JMenuItem def = new JMenuItem(Res.getString("privacy.menu.default.list"));
        def.setIcon(SparkRes.getImageIcon("PRIVACY_CHECK"));
        act.addActionListener( e -> node.setListAsActive() );
        def.addActionListener( e -> node.setListAsDefault() );
        addList.addActionListener( e -> {
            String s = JOptionPane.showInputDialog(_comp, Res.getString("privacy.dialog.add.list"), Res.getString("privacy.menu.add.list"), JOptionPane.PLAIN_MESSAGE);
            if ((s != null) && (s.length() > 0)) {
                _pManager.createPrivacyList(s);
                addListNode(new PrivacyTreeNode(_pManager.getPrivacyList(s)), _top);
            }
        } );
        rem.addActionListener( e -> {
            int n = JOptionPane.showOptionDialog(_comp, Res.getString("privacy.dialog.rem.list", node.getPrivacyList().getListName()), Res.getString("privacy.menu.remove.list"), JOptionPane.YES_NO_OPTION,
                                                 JOptionPane.WARNING_MESSAGE, null, // no custom icon
                                                 null, // the titles of buttons
                                                 null); // default button title
            if (n == JOptionPane.YES_OPTION) {
                _pManager.removePrivacyList(node.getPrivacyList().getListName());
                _model.removeNodeFromParent(node);
            }
        } );
        rem.setIcon(SparkRes.getImageIcon(SparkRes.SMALL_DELETE));
        menu.add(addList);
        // Remove/activate/default only make sense on an actual list node,
        // not on the root.
        if (!node.equals(_top)) {
            menu.add(rem);
            menu.add(act);
            menu.add(def);
        }
    }

    /**
     * Adds a node to a parent on the jtree using the defaultModel
     *
     * @param node
     *            the node that should be added. this is the childnode
     * @param parent
     *            the parent node, where the node should be added to
     */
    private void addListNode(PrivacyTreeNode node, DefaultMutableTreeNode parent) {
        _model.insertNodeInto(node, parent, 0);
        SparkPrivacyList plist = node.getPrivacyList();
        PrivacyTreeNode contacts = new PrivacyTreeNode(Res.getString("privacy.node.contacts"));
        contacts.setisContactGroup(true);
        _model.insertNodeInto(contacts, node, 0);
        PrivacyTreeNode groups = new PrivacyTreeNode(Res.getString("privacy.node.groups"));
        groups.setisGroupNode(true);
        _model.insertNodeInto(groups, node, 0);
        // Distribute the list's items under the Contacts/Groups sub-nodes.
        for (PrivacyItem pI : plist.getPrivacyItems()) {
            if (pI.getType().equals(PrivacyItem.Type.jid)) {
                _model.insertNodeInto(new PrivacyTreeNode(pI), contacts, 0);
            } else if (pI.getType().equals(PrivacyItem.Type.group)) {
                _model.insertNodeInto(new PrivacyTreeNode(pI), groups, 0);
            }
        }
    }

    /**
     * Loads the PrivacyLists for the first time and adds them to the tree
     */
    private void loadPrivacyLists() {
        for (SparkPrivacyList list : _pManager.getPrivacyLists()) {
            addListNode(new PrivacyTreeNode(list), _top);
        }
        _tree.addMouseListener(new MouseListener() {

            @Override
            public void mouseClicked(MouseEvent e) {
                int row = _tree.getClosestRowForLocation(e.getX(), e.getY());
                // Right-click selects the row under the cursor unless it is
                // already part of a multi-selection.
                if (SwingUtilities.isRightMouseButton(e) && _tree.getSelectionCount() == 1) {
                    _tree.setSelectionRow(row);
                } else if (SwingUtilities.isRightMouseButton(e)) {
                    boolean found = false;
                    if (_tree.getSelectionRows() != null) {
                        for (int i : _tree.getSelectionRows()) {
                            if (i == row) {
                                found = true;
                                break;
                            }
                        }
                    }
                    if (!found) {
                        _tree.setSelectionRow(row);
                    }
                }
                final PrivacyTreeNode node = (PrivacyTreeNode) _tree.getLastSelectedPathComponent();
                JPopupMenu menu = new JPopupMenu("Menu");
                if (node == null) {
                    return;
                }
                // Build a context menu matching the kind of node clicked.
                if (node.isLeaf() && !node.isStructureNode() && node.isPrivacyItem()) {
                    addMenuForLeaf(menu, node);
                }
                if (node.isStructureNode() && !node.isRoot()) {
                    addMenuForGroupNodes(menu, node);
                }
                if (node.isPrivacyList() || node.isRoot()) {
                    addMenuForListNodes(menu, node);
                }
                if (SwingUtilities.isRightMouseButton(e)) {
                    menu.show(_tree, e.getX(), e.getY());
                }
            }

            @Override
            public void mousePressed(MouseEvent e) {
            }

            @Override
            public void mouseReleased(MouseEvent e) {
            }

            @Override
            public void mouseEntered(MouseEvent e) {
            }

            @Override
            public void mouseExited(MouseEvent e) {
            }
        });
    }

    @Override
    public void listActivated(String listname) {
        _actList.setText(listname);
        _actList.setEnabled(true);
        _actList.setIcon(SparkRes.getImageIcon("PRIVACY_DEACTIVATE_LIST"));
    }

    @Override
    public void listDeActivated(String listname) {
        if (_actList.getText().equals(listname)) {
            _actList.setText(Res.getString("privacy.button.no.list.selected"));
            _actList.setEnabled(false);
            _actList.setIcon(null);
        }
    }

    @Override
    public void listSetAsDefault(String listname) {
        _defList.setText(listname);
        _defList.setIcon(SparkRes.getImageIcon("PRIVACY_DEACTIVATE_LIST"));
        _defList.setEnabled(true);
    }

    @Override
    public void listRemovedAsDefault(String listname) {
        if (_defList.getText().equals(listname)) {
            _defList.setText(Res.getString("privacy.button.no.list.selected"));
            _defList.setEnabled(false);
            _defList.setIcon(null);
        }
    }
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.directconnect.model;
import java.io.Serializable;
import java.util.Objects;

import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Information about the associated gateway.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/directconnect-2012-10-25/AssociatedGateway" target="_top">AWS
* API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AssociatedGateway implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* The ID of the associated gateway.
* </p>
*/
private String id;
/**
* <p>
* The type of associated gateway.
* </p>
*/
private String type;
/**
* <p>
* The ID of the AWS account that owns the associated virtual private gateway or transit gateway.
* </p>
*/
private String ownerAccount;
/**
* <p>
* The Region where the associated gateway is located.
* </p>
*/
private String region;
/**
* <p>
* The ID of the associated gateway.
* </p>
*
* @param id
* The ID of the associated gateway.
*/
public void setId(String id) {
    // Stores the associated gateway ID as-is (no validation in generated code).
    this.id = id;
}
/**
* <p>
* The ID of the associated gateway.
* </p>
*
* @return The ID of the associated gateway.
*/
public String getId() {
    // Plain accessor for the associated gateway ID.
    return id;
}
/**
* <p>
* The ID of the associated gateway.
* </p>
*
* @param id
* The ID of the associated gateway.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public AssociatedGateway withId(String id) {
    // Fluent setter: delegates to setId and returns this for call chaining.
    setId(id);
    return this;
}
/**
* <p>
* The type of associated gateway.
* </p>
*
* @param type
* The type of associated gateway.
* @see GatewayType
*/
public void setType(String type) {
    // Stores the gateway type's string form (see GatewayType for valid values).
    this.type = type;
}
/**
* <p>
* The type of associated gateway.
* </p>
*
* @return The type of associated gateway.
* @see GatewayType
*/
public String getType() {
    // Plain accessor for the gateway type string.
    return type;
}
/**
* <p>
* The type of associated gateway.
* </p>
*
* @param type
* The type of associated gateway.
* @return Returns a reference to this object so that method calls can be chained together.
* @see GatewayType
*/
public AssociatedGateway withType(String type) {
    // Fluent setter: delegates to setType and returns this for call chaining.
    setType(type);
    return this;
}
/**
* <p>
* The type of associated gateway.
* </p>
*
* @param type
* The type of associated gateway.
* @return Returns a reference to this object so that method calls can be chained together.
* @see GatewayType
*/
public AssociatedGateway withType(GatewayType type) {
    // Enum overload: stores the enum's string form directly (assigns the field
    // rather than delegating to setType) and returns this for chaining.
    this.type = type.toString();
    return this;
}
/**
* <p>
* The ID of the AWS account that owns the associated virtual private gateway or transit gateway.
* </p>
*
* @param ownerAccount
* The ID of the AWS account that owns the associated virtual private gateway or transit gateway.
*/
public void setOwnerAccount(String ownerAccount) {
    // Stores the owning AWS account ID as-is.
    this.ownerAccount = ownerAccount;
}
/**
* <p>
* The ID of the AWS account that owns the associated virtual private gateway or transit gateway.
* </p>
*
* @return The ID of the AWS account that owns the associated virtual private gateway or transit gateway.
*/
public String getOwnerAccount() {
    // Plain accessor for the owning AWS account ID.
    return ownerAccount;
}
/**
* <p>
* The ID of the AWS account that owns the associated virtual private gateway or transit gateway.
* </p>
*
* @param ownerAccount
* The ID of the AWS account that owns the associated virtual private gateway or transit gateway.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public AssociatedGateway withOwnerAccount(String ownerAccount) {
    // Fluent setter: delegates to setOwnerAccount and returns this for chaining.
    setOwnerAccount(ownerAccount);
    return this;
}
/**
* <p>
* The Region where the associated gateway is located.
* </p>
*
* @param region
* The Region where the associated gateway is located.
*/
public void setRegion(String region) {
    // Stores the Region name as-is.
    this.region = region;
}
/**
* <p>
* The Region where the associated gateway is located.
* </p>
*
* @return The Region where the associated gateway is located.
*/
public String getRegion() {
    // Plain accessor for the Region name.
    return region;
}
/**
 * <p>
 * The Region where the associated gateway is located.
 * </p>
 *
 * @param region
 *        The Region where the associated gateway is located.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public AssociatedGateway withRegion(String region) {
    // Assign directly; equivalent to delegating through setRegion(region).
    this.region = region;
    return this;
}
/**
 * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
 * redacted from this string using a placeholder value.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    // Read each property once up front; only non-null properties are rendered.
    final String id = getId();
    final String type = getType();
    final String ownerAccount = getOwnerAccount();
    final String region = getRegion();
    final StringBuilder text = new StringBuilder("{");
    if (id != null) {
        text.append("Id: ").append(id).append(",");
    }
    if (type != null) {
        text.append("Type: ").append(type).append(",");
    }
    if (ownerAccount != null) {
        text.append("OwnerAccount: ").append(ownerAccount).append(",");
    }
    if (region != null) {
        text.append("Region: ").append(region);
    }
    return text.append("}").toString();
}
/**
 * Field-by-field equality against another {@code AssociatedGateway}; any other type
 * (including {@code null}) is unequal.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    // instanceof is false for null, so this also rejects null arguments.
    if (!(obj instanceof AssociatedGateway)) {
        return false;
    }
    AssociatedGateway other = (AssociatedGateway) obj;
    // Objects.equals(a, b) is (a == b) || (a != null && a.equals(b)) — identical to the
    // conventional null-xor-then-equals comparison, field by field.
    return java.util.Objects.equals(other.getId(), this.getId())
            && java.util.Objects.equals(other.getType(), this.getType())
            && java.util.Objects.equals(other.getOwnerAccount(), this.getOwnerAccount())
            && java.util.Objects.equals(other.getRegion(), this.getRegion());
}
/**
 * Hash code consistent with {@link #equals(Object)}: the standard 31-based combination
 * over Id, Type, OwnerAccount and Region, with null contributing 0.
 */
@Override
public int hashCode() {
    // Objects.hash computes exactly: h = 1; h = 31 * h + (e == null ? 0 : e.hashCode())
    // for each element in order — the same value as the hand-rolled loop.
    return java.util.Objects.hash(getId(), getType(), getOwnerAccount(), getRegion());
}
/**
 * Shallow copy via {@link Object#clone()}. All fields are immutable Strings, so a
 * shallow copy is a fully independent value.
 *
 * @throws IllegalStateException if the JVM reports the class as non-cloneable,
 *         which cannot happen for a correctly declared Cloneable model class.
 */
@Override
public AssociatedGateway clone() {
    try {
        return (AssociatedGateway) super.clone();
    } catch (CloneNotSupportedException e) {
        throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
    }
}
/**
 * SDK-internal hook: serializes this object using the generated marshaller.
 * Not intended to be called by application code.
 */
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
    com.amazonaws.services.directconnect.model.transform.AssociatedGatewayMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
| |
/*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.baidu.oped.apm.rpc.stream;
import com.baidu.oped.apm.rpc.ApmSocketException;
import com.baidu.oped.apm.rpc.packet.PacketType;
import com.baidu.oped.apm.rpc.packet.stream.*;
import com.baidu.oped.apm.rpc.util.AssertUtils;
import com.baidu.oped.apm.rpc.util.IDGenerator;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFuture;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* @author koo.taejin
*/
/**
 * Manages logical stream channels multiplexed over a single Netty {@link Channel}.
 * <p>
 * Each stream is identified by an integer id issued by the {@link IDGenerator}; the
 * id-to-context mapping lives in a {@link ConcurrentMap}. Client-initiated streams are
 * opened with {@link #openStream(byte[], ClientStreamChannelMessageListener)}; server-side
 * streams are registered when a create packet arrives in {@link #messageReceived(StreamPacket)}.
 *
 * @author koo.taejin
 */
public class StreamChannelManager {

    private static final LoggingStreamChannelStateChangeEventHandler LOGGING_STATE_CHANGE_HANDLER = new LoggingStreamChannelStateChangeEventHandler();

    // Maximum time (ms) to wait for the remote peer to acknowledge a stream-create request.
    private static final int OPEN_TIMEOUT_MILLIS = 3000;

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    private final Channel channel;
    private final IDGenerator idGenerator;
    private final ServerStreamChannelMessageListener streamChannelMessageListener;

    private final ConcurrentMap<Integer, StreamChannelContext> channelMap = new ConcurrentHashMap<Integer, StreamChannelContext>();

    /** Creates a client-only manager: inbound create requests will be rejected. */
    public StreamChannelManager(Channel channel, IDGenerator idGenerator) {
        this(channel, idGenerator, DisabledServerStreamChannelMessageListener.INSTANCE);
    }

    public StreamChannelManager(Channel channel, IDGenerator idGenerator, ServerStreamChannelMessageListener serverStreamChannelMessageListener) {
        AssertUtils.assertNotNull(channel, "Channel may not be null.");
        AssertUtils.assertNotNull(idGenerator, "IDGenerator may not be null.");
        AssertUtils.assertNotNull(serverStreamChannelMessageListener, "ServerStreamChannelMessageListener may not be null.");
        this.channel = channel;
        this.idGenerator = idGenerator;
        this.streamChannelMessageListener = serverStreamChannelMessageListener;
    }

    /**
     * Closes every registered stream channel and notifies the peer with
     * {@link StreamCode#STATE_CLOSED}.
     */
    public void close() {
        // ConcurrentHashMap's key-set iterator is weakly consistent, so removing
        // entries (done inside clearResourceAndSendClose) while iterating is safe.
        Set<Integer> keySet = channelMap.keySet();
        for (Integer key : keySet) {
            clearResourceAndSendClose(key, StreamCode.STATE_CLOSED);
        }
    }

    public ClientStreamChannelContext openStream(byte[] payload, ClientStreamChannelMessageListener messageListener) {
        return openStream(payload, messageListener, LOGGING_STATE_CHANGE_HANDLER);
    }

    /**
     * Opens a new client stream channel and blocks (up to {@value #OPEN_TIMEOUT_MILLIS} ms)
     * until the peer acknowledges the create request.
     *
     * @param payload             application payload sent with the create packet
     * @param messageListener     callback for data received on the new stream
     * @param stateChangeListener optional state-change hook; a logging handler is used when null
     * @return the new context; on timeout it carries a {@link StreamCreateFailPacket}
     *         with {@link StreamCode#CONNECTION_TIMEOUT}
     * @throws ApmSocketException if the generated stream id is already in use
     */
    public ClientStreamChannelContext openStream(byte[] payload, ClientStreamChannelMessageListener messageListener, StreamChannelStateChangeEventHandler<ClientStreamChannel> stateChangeListener) {
        logger.info("Open streamChannel initialization started. Channel:{} ", channel);
        final int streamChannelId = idGenerator.generate();
        ClientStreamChannel newStreamChannel = new ClientStreamChannel(channel, streamChannelId, this);
        if (stateChangeListener != null) {
            newStreamChannel.addStateChangeEventHandler(stateChangeListener);
        } else {
            newStreamChannel.addStateChangeEventHandler(LOGGING_STATE_CHANGE_HANDLER);
        }
        newStreamChannel.changeStateOpen();
        ClientStreamChannelContext newStreamChannelContext = new ClientStreamChannelContext(newStreamChannel, messageListener);
        StreamChannelContext old = channelMap.put(streamChannelId, newStreamChannelContext);
        if (old != null) {
            throw new ApmSocketException("already streamChannelId exist:" + streamChannelId + " streamChannel:" + old);
        }
        // the order of below code is very important.
        newStreamChannel.changeStateConnectAwait();
        newStreamChannel.sendCreate(payload);
        newStreamChannel.awaitOpen(OPEN_TIMEOUT_MILLIS);
        if (newStreamChannel.checkState(StreamChannelStateCode.CONNECTED)) {
            logger.info("Open streamChannel initialization completed. Channel:{}, StreamChannelContext:{} ", channel, newStreamChannelContext);
        } else {
            // No acknowledgement within the timeout: roll back the registration and
            // record the failure on the context for the caller to inspect.
            newStreamChannel.changeStateClose();
            channelMap.remove(streamChannelId);
            newStreamChannelContext.setCreateFailPacket(new StreamCreateFailPacket(streamChannelId, StreamCode.CONNECTION_TIMEOUT));
        }
        return newStreamChannelContext;
    }

    /**
     * Entry point for every inbound stream packet on this channel. Dispatches by packet
     * type and by whether the target stream is server- or client-side.
     */
    public void messageReceived(StreamPacket packet) {
        final int streamChannelId = packet.getStreamChannelId();
        final short packetType = packet.getPacketType();
        logger.debug("StreamChannel message received. (Channel:{}, StreamId:{}, Packet:{}).", channel, streamChannelId, packet);
        if (PacketType.APPLICATION_STREAM_CREATE == packetType) {
            handleCreate((StreamCreatePacket) packet);
            return;
        }
        StreamChannelContext context = findStreamChannel(streamChannelId);
        if (context == null) {
            // Unknown stream id: tell the peer, unless this was itself a close packet
            // (there is nothing left to close on either side).
            if (!(PacketType.APPLICATION_STREAM_CLOSE == packetType)) {
                clearResourceAndSendClose(streamChannelId, StreamCode.ID_NOT_FOUND);
            }
        } else {
            if (isServerStreamChannelContext(context)) {
                messageReceived((ServerStreamChannelContext) context, packet);
            } else if (isClientStreamChannelContext(context)) {
                messageReceived((ClientStreamChannelContext) context, packet);
            } else {
                // NOTE: UNKNWON_ERROR is the actual (misspelled) name of the enum
                // constant declared in StreamCode; it cannot be renamed from here.
                clearResourceAndSendClose(streamChannelId, StreamCode.UNKNWON_ERROR);
            }
        }
    }

    /** Dispatches a packet addressed to a server-side stream. */
    private void messageReceived(ServerStreamChannelContext context, StreamPacket packet) {
        final short packetType = packet.getPacketType();
        final int streamChannelId = packet.getStreamChannelId();
        switch (packetType) {
            case PacketType.APPLICATION_STREAM_CLOSE:
                handleStreamClose(context, (StreamClosePacket) packet);
                break;
            case PacketType.APPLICATION_STREAM_PING:
                handlePing(context, (StreamPingPacket) packet);
                break;
            case PacketType.APPLICATION_STREAM_PONG:
                // Pong requires no action; the ping side only checks liveness.
                break;
            default:
                clearResourceAndSendClose(streamChannelId, StreamCode.PACKET_UNKNOWN);
                logger.info("Unknown StreamPacket received Channel:{}, StreamId:{}, Packet;{}.", channel, streamChannelId, packet);
        }
    }

    /** Dispatches a packet addressed to a client-side stream. */
    private void messageReceived(ClientStreamChannelContext context, StreamPacket packet) {
        final short packetType = packet.getPacketType();
        final int streamChannelId = packet.getStreamChannelId();
        switch (packetType) {
            case PacketType.APPLICATION_STREAM_CREATE_SUCCESS:
                handleCreateSuccess(context, (StreamCreateSuccessPacket) packet);
                break;
            case PacketType.APPLICATION_STREAM_CREATE_FAIL:
                handleCreateFail(context, (StreamCreateFailPacket) packet);
                break;
            case PacketType.APPLICATION_STREAM_RESPONSE:
                handleStreamResponse(context, (StreamResponsePacket) packet);
                break;
            case PacketType.APPLICATION_STREAM_CLOSE:
                handleStreamClose(context, (StreamClosePacket) packet);
                break;
            case PacketType.APPLICATION_STREAM_PING:
                handlePing(context, (StreamPingPacket) packet);
                break;
            case PacketType.APPLICATION_STREAM_PONG:
                // Pong requires no action; the ping side only checks liveness.
                break;
            default:
                clearResourceAndSendClose(streamChannelId, StreamCode.PACKET_UNKNOWN);
                logger.info("Unknown StreamPacket received Channel:{}, StreamId:{}, Packet;{}.", channel, streamChannelId, packet);
        }
    }

    /**
     * Handles an inbound create request: registers a new server stream, asks the
     * application listener to accept it, and replies success or failure to the peer.
     */
    private void handleCreate(StreamCreatePacket packet) {
        final int streamChannelId = packet.getStreamChannelId();
        ServerStreamChannel streamChannel = new ServerStreamChannel(this.channel, streamChannelId, this);
        ServerStreamChannelContext streamChannelContext = new ServerStreamChannelContext(streamChannel);
        StreamCode code = registerStreamChannel(streamChannelContext);
        if (code == StreamCode.OK) {
            code = streamChannelMessageListener.handleStreamCreate(streamChannelContext, packet);
            if (code == StreamCode.OK) {
                streamChannel.changeStateConnected();
                streamChannel.sendCreateSuccess();
            }
        }
        if (code != StreamCode.OK) {
            clearResourceAndSendCreateFail(streamChannelId, code);
        }
    }

    /**
     * Registers a server stream context in the channel map and advances its state machine.
     *
     * @return {@link StreamCode#OK} on success, {@link StreamCode#ID_DUPLICATED} if the id
     *         is taken, or {@link StreamCode#STATE_ERROR} on an illegal state transition
     */
    private StreamCode registerStreamChannel(ServerStreamChannelContext streamChannelContext) {
        int streamChannelId = streamChannelContext.getStreamId();
        ServerStreamChannel streamChannel = streamChannelContext.getStreamChannel();
        streamChannel.changeStateOpen();
        if (channelMap.putIfAbsent(streamChannelId, streamChannelContext) != null) {
            streamChannel.changeStateClose();
            return StreamCode.ID_DUPLICATED;
        }
        if (!streamChannel.changeStateConnectArrived()) {
            streamChannel.changeStateClose();
            channelMap.remove(streamChannelId);
            return StreamCode.STATE_ERROR;
        }
        return StreamCode.OK;
    }

    private void handleCreateSuccess(ClientStreamChannelContext streamChannelContext, StreamCreateSuccessPacket packet) {
        StreamChannel streamChannel = streamChannelContext.getStreamChannel();
        streamChannel.changeStateConnected();
    }

    private void handleCreateFail(ClientStreamChannelContext streamChannelContext, StreamCreateFailPacket packet) {
        // Record the failure for the caller blocked in openStream, then drop the stream.
        streamChannelContext.setCreateFailPacket(packet);
        clearStreamChannelResource(streamChannelContext.getStreamId());
    }

    /** Delivers stream data to the application listener, if the stream is connected. */
    private void handleStreamResponse(ClientStreamChannelContext context, StreamResponsePacket packet) {
        int streamChannelId = packet.getStreamChannelId();
        StreamChannel streamChannel = context.getStreamChannel();
        StreamChannelStateCode currentCode = streamChannel.getCurrentState();
        if (StreamChannelStateCode.CONNECTED == currentCode) {
            context.getClientStreamChannelMessageListener().handleStreamData(context, packet);
        } else if (StreamChannelStateCode.CONNECT_AWAIT == currentCode) {
            // may happen in the timing
        } else {
            clearResourceAndSendClose(streamChannelId, StreamCode.STATE_NOT_CONNECTED);
        }
    }

    private void handleStreamClose(ClientStreamChannelContext context, StreamClosePacket packet) {
        context.getClientStreamChannelMessageListener().handleStreamClose(context, packet);
        clearStreamChannelResource(context.getStreamId());
    }

    private void handleStreamClose(ServerStreamChannelContext context, StreamClosePacket packet) {
        streamChannelMessageListener.handleStreamClose(context, packet);
        clearStreamChannelResource(context.getStreamId());
    }

    /** Answers a ping with a pong, or closes the stream if it is not connected. */
    private void handlePing(StreamChannelContext streamChannelContext, StreamPingPacket packet) {
        int streamChannelId = packet.getStreamChannelId();
        StreamChannel streamChannel = streamChannelContext.getStreamChannel();
        if (!streamChannel.checkState(StreamChannelStateCode.CONNECTED)) {
            clearResourceAndSendClose(streamChannelId, StreamCode.STATE_NOT_CONNECTED);
            return;
        }
        streamChannel.sendPong(packet.getRequestId());
    }

    /** @return the registered context for the given stream id, or null if none. */
    public StreamChannelContext findStreamChannel(int channelId) {
        StreamChannelContext streamChannelContext = this.channelMap.get(channelId);
        return streamChannelContext;
    }

    private ChannelFuture clearResourceAndSendCreateFail(int streamChannelId, StreamCode code) {
        clearStreamChannelResource(streamChannelId);
        return sendCreateFail(streamChannelId, code);
    }

    protected ChannelFuture clearResourceAndSendClose(int streamChannelId, StreamCode code) {
        clearStreamChannelResource(streamChannelId);
        return sendClose(streamChannelId, code);
    }

    /** Removes the stream from the map and moves its channel to the closed state. */
    private void clearStreamChannelResource(int streamId) {
        StreamChannelContext streamChannelContext = channelMap.remove(streamId);
        if (streamChannelContext != null) {
            streamChannelContext.getStreamChannel().changeStateClose();
        }
    }

    private ChannelFuture sendCreateFail(int streamChannelId, StreamCode code) {
        StreamCreateFailPacket packet = new StreamCreateFailPacket(streamChannelId, code);
        return this.channel.write(packet);
    }

    /** @return the write future, or null when the channel is already disconnected. */
    private ChannelFuture sendClose(int streamChannelId, StreamCode code) {
        if (channel.isConnected()) {
            StreamClosePacket packet = new StreamClosePacket(streamChannelId, code);
            return this.channel.write(packet);
        } else {
            return null;
        }
    }

    private boolean isServerStreamChannelContext(StreamChannelContext context) {
        // instanceof is false for null, so no explicit null check is needed.
        return context instanceof ServerStreamChannelContext;
    }

    private boolean isClientStreamChannelContext(StreamChannelContext context) {
        return context instanceof ClientStreamChannelContext;
    }

    /** @return true when a real server-side listener was supplied at construction. */
    public boolean isSupportServerMode() {
        return streamChannelMessageListener != DisabledServerStreamChannelMessageListener.INSTANCE;
    }
}
| |
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.modules.exif;
import com.drew.imaging.ImageMetadataReader;
import com.drew.imaging.ImageProcessingException;
import com.drew.lang.GeoLocation;
import com.drew.lang.Rational;
import com.drew.metadata.Metadata;
import com.drew.metadata.exif.ExifIFD0Directory;
import com.drew.metadata.exif.ExifSubIFDDirectory;
import com.drew.metadata.exif.GpsDirectory;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.TimeZone;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
/**
* Ingest module to parse image Exif metadata. Currently only supports JPEG
* files. Ingests an image file and, if available, adds it's date, latitude,
* longitude, altitude, device model, and device make to a blackboard artifact.
*/
/**
 * Ingest module to parse image Exif metadata. Currently only supports JPEG
 * files. Ingests an image file and, if available, adds its date, latitude,
 * longitude, altitude, device model, and device make to a blackboard artifact.
 */
public final class ExifParserFileIngestModule implements FileIngestModule {

    private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName());
    private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();

    private final IngestServices services = IngestServices.getInstance();
    private final AtomicInteger filesProcessed = new AtomicInteger(0);
    // Set when at least one EXIF artifact has been created since the last ModuleDataEvent.
    private volatile boolean filesToFire = false;
    private final HashSet<String> supportedMimeTypes = new HashSet<>();

    private long jobId;
    private FileTypeDetector fileTypeDetector;
    // Time zone of the current data source; resolved lazily on the first file with an EXIF date.
    private TimeZone timeZone = null;
    private Blackboard blackboard;

    ExifParserFileIngestModule() {
        supportedMimeTypes.add("audio/x-wav");
        supportedMimeTypes.add("image/jpeg");
        supportedMimeTypes.add("image/tiff");
    }

    @Override
    public void startUp(IngestJobContext context) throws IngestModuleException {
        jobId = context.getJobId();
        refCounter.incrementAndGet(jobId);
        try {
            fileTypeDetector = new FileTypeDetector();
        } catch (FileTypeDetector.FileTypeDetectorInitException ex) {
            throw new IngestModuleException(NbBundle.getMessage(this.getClass(), "ExifParserFileIngestModule.startUp.fileTypeDetectorInitializationException.msg"));
        }
    }

    /**
     * Filters out unallocated, non-file, known, and unsupported content, then parses
     * EXIF metadata from the remaining files.
     */
    @Override
    public ProcessResult process(AbstractFile content) {
        blackboard = Case.getCurrentCase().getServices().getBlackboard();
        // skip unallocated space
        if (content.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) {
            return ProcessResult.OK;
        }
        if (content.isFile() == false) {
            return ProcessResult.OK;
        }
        // skip known files
        if (content.getKnown().equals(TskData.FileKnown.KNOWN)) {
            return ProcessResult.OK;
        }
        // update the tree every 1000 files if we have EXIF data that is not being displayed
        final int filesProcessedValue = filesProcessed.incrementAndGet();
        if ((filesProcessedValue % 1000 == 0)) {
            if (filesToFire) {
                services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
                filesToFire = false;
            }
        }
        // skip unsupported MIME types
        if (!parsableFormat(content)) {
            return ProcessResult.OK;
        }
        return processFile(content);
    }

    /**
     * Reads EXIF date, GPS, and device attributes from the file and, when any are
     * found, posts a TSK_METADATA_EXIF artifact and indexes it for keyword search.
     * <p>
     * Uses try-with-resources for the content streams; the previous hand-written
     * finally block contained a {@code return} that silently discarded the method's
     * real result (or any in-flight exception) whenever closing the stream failed.
     */
    ProcessResult processFile(AbstractFile f) {
        try (InputStream in = new ReadContentInputStream(f);
                BufferedInputStream bin = new BufferedInputStream(in)) {
            Collection<BlackboardAttribute> attributes = new ArrayList<>();
            Metadata metadata = ImageMetadataReader.readMetadata(bin);
            // Date
            ExifSubIFDDirectory exifDir = metadata.getFirstDirectoryOfType(ExifSubIFDDirectory.class);
            if (exifDir != null) {
                // set the timeZone for the current datasource.
                if (timeZone == null) {
                    try {
                        Content dataSource = f.getDataSource();
                        if ((dataSource != null) && (dataSource instanceof Image)) {
                            Image image = (Image) dataSource;
                            timeZone = TimeZone.getTimeZone(image.getTimeZone());
                        }
                    } catch (TskCoreException ex) {
                        logger.log(Level.INFO, "Error getting time zones", ex); //NON-NLS
                    }
                }
                Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL, timeZone);
                if (date != null) {
                    // Blackboard timestamps are epoch seconds, not milliseconds.
                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, ExifParserModuleFactory.getModuleName(), date.getTime() / 1000));
                }
            }
            // GPS Stuff
            GpsDirectory gpsDir = metadata.getFirstDirectoryOfType(GpsDirectory.class);
            if (gpsDir != null) {
                GeoLocation loc = gpsDir.getGeoLocation();
                if (loc != null) {
                    double latitude = loc.getLatitude();
                    double longitude = loc.getLongitude();
                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE, ExifParserModuleFactory.getModuleName(), latitude));
                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE, ExifParserModuleFactory.getModuleName(), longitude));
                }
                Rational altitude = gpsDir.getRational(GpsDirectory.TAG_ALTITUDE);
                if (altitude != null) {
                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE, ExifParserModuleFactory.getModuleName(), altitude.doubleValue()));
                }
            }
            // Device info
            ExifIFD0Directory devDir = metadata.getFirstDirectoryOfType(ExifIFD0Directory.class);
            if (devDir != null) {
                String model = devDir.getString(ExifIFD0Directory.TAG_MODEL);
                if (model != null && !model.isEmpty()) {
                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL, ExifParserModuleFactory.getModuleName(), model));
                }
                String make = devDir.getString(ExifIFD0Directory.TAG_MAKE);
                if (make != null && !make.isEmpty()) {
                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE, ExifParserModuleFactory.getModuleName(), make));
                }
            }
            // Add the attributes, if there are any, to a new artifact
            if (!attributes.isEmpty()) {
                BlackboardArtifact bba = f.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF);
                bba.addAttributes(attributes);
                try {
                    // index the artifact for keyword search
                    blackboard.indexArtifact(bba);
                } catch (Blackboard.BlackboardException ex) {
                    logger.log(Level.SEVERE, NbBundle.getMessage(Blackboard.class, "Blackboard.unableToIndexArtifact.error.msg", bba.getDisplayName()), ex); //NON-NLS
                    MessageNotifyUtil.Notify.error(
                            NbBundle.getMessage(Blackboard.class, "Blackboard.unableToIndexArtifact.exception.msg"), bba.getDisplayName());
                }
                filesToFire = true;
            }
            return ProcessResult.OK;
        } catch (TskCoreException ex) {
            logger.log(Level.WARNING, "Failed to create blackboard artifact for exif metadata ({0}).", ex.getLocalizedMessage()); //NON-NLS
            return ProcessResult.ERROR;
        } catch (ImageProcessingException ex) {
            logger.log(Level.WARNING, "Failed to process the image file: {0}/{1}({2})", new Object[]{f.getParentPath(), f.getName(), ex.getLocalizedMessage()}); //NON-NLS
            return ProcessResult.ERROR;
        } catch (IOException ex) {
            // Also reached when closing the content streams fails.
            logger.log(Level.WARNING, "IOException when parsing image file: " + f.getParentPath() + "/" + f.getName(), ex); //NON-NLS
            return ProcessResult.ERROR;
        }
    }

    /**
     * Checks whether EXIF extraction should be attempted, based on the detected
     * MIME type of the file.
     *
     * @param f file to be checked
     *
     * @return true if the file should be processed
     */
    private boolean parsableFormat(AbstractFile f) {
        try {
            String mimeType = fileTypeDetector.getFileType(f);
            if (mimeType != null) {
                return supportedMimeTypes.contains(mimeType);
            } else {
                return false;
            }
        } catch (TskCoreException ex) {
            logger.log(Level.SEVERE, "Failed to detect file type", ex); //NON-NLS
            return false;
        }
    }

    @Override
    public void shutDown() {
        // We only need to check for this final event on the last module per job
        if (refCounter.decrementAndGet(jobId) == 0) {
            timeZone = null;
            if (filesToFire) {
                //send the final new data event
                services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
            }
        }
    }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.completion;
import com.intellij.codeInsight.lookup.Lookup;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.injected.editor.DocumentWindow;
import com.intellij.injected.editor.EditorWindow;
import com.intellij.lang.FileASTNode;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.diagnostic.Attachment;
import com.intellij.openapi.diagnostic.RuntimeExceptionWithAttachments;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.ex.RangeMarkerEx;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.FileViewProvider;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.impl.DebugUtil;
import com.intellij.psi.util.PsiUtilCore;
import org.jetbrains.annotations.Contract;
import java.util.List;
/**
* @author peter
*/
/**
 * Internal consistency assertions used by the code-completion machinery.
 * Each check either returns silently or throws with a detailed diagnostic
 * (often a RuntimeExceptionWithAttachments carrying file/AST snapshots).
 *
 * @author peter
 */
class CompletionAssertions {
/**
 * Verifies that the PSI tree is in sync with the document: same text length and no
 * uncommitted changes. On mismatch, builds a multi-line diagnostic describing the
 * file, the view provider, and both lengths, and throws with the file text, AST dump,
 * and document text attached.
 */
static void assertCommitSuccessful(Editor editor, PsiFile psiFile) {
Document document = editor.getDocument();
int docLength = document.getTextLength();
int psiLength = psiFile.getTextLength();
PsiDocumentManager manager = PsiDocumentManager.getInstance(psiFile.getProject());
boolean committed = !manager.isUncommited(document);
if (docLength == psiLength && committed) {
return;
}
// Mismatch: collect everything that might explain it into one message.
FileViewProvider viewProvider = psiFile.getViewProvider();
String message = "unsuccessful commit:";
message += "\nmatching=" + (psiFile == manager.getPsiFile(document));
message += "\ninjectedEditor=" + (editor instanceof EditorWindow);
message += "\ninjectedFile=" + InjectedLanguageManager.getInstance(psiFile.getProject()).isInjectedFragment(psiFile);
message += "\ncommitted=" + committed;
message += "\nfile=" + psiFile.getName();
message += "\nfile class=" + psiFile.getClass();
message += "\nfile.valid=" + psiFile.isValid();
message += "\nfile.physical=" + psiFile.isPhysical();
message += "\nfile.eventSystemEnabled=" + viewProvider.isEventSystemEnabled();
message += "\nlanguage=" + psiFile.getLanguage();
message += "\ndoc.length=" + docLength;
message += "\npsiFile.length=" + psiLength;
String fileText = psiFile.getText();
if (fileText != null) {
message += "\npsiFile.text.length=" + fileText.length();
}
FileASTNode node = psiFile.getNode();
if (node != null) {
message += "\nnode.length=" + node.getTextLength();
String nodeText = node.getText();
message += "\nnode.text.length=" + nodeText.length();
}
VirtualFile virtualFile = viewProvider.getVirtualFile();
message += "\nvirtualFile=" + virtualFile;
message += "\nvirtualFile.class=" + virtualFile.getClass();
message += "\n" + DebugUtil.currentStackTrace();
throw new RuntimeExceptionWithAttachments(
"Commit unsuccessful", message,
new Attachment(virtualFile.getPath() + "_file.txt", StringUtil.notNullize(fileText)),
createAstAttachment(psiFile, psiFile),
new Attachment("docText.txt", document.getText()));
}
// Asserts that the editor (or its injected-fragment window) is still valid.
static void checkEditorValid(Editor editor) {
if (!isEditorValid(editor)) {
throw new AssertionError();
}
}
// A top-level editor is always valid; an injected EditorWindow must report isValid().
static boolean isEditorValid(Editor editor) {
return !(editor instanceof EditorWindow) || ((EditorWindow)editor).isValid();
}
// Attachment with the AST dump of fileCopy, named after the original file's path.
private static Attachment createAstAttachment(PsiFile fileCopy, final PsiFile originalFile) {
return new Attachment(originalFile.getViewProvider().getVirtualFile().getPath() + " syntactic tree.txt", DebugUtil.psiToString(fileCopy, false, true));
}
// Attachment with the plain text of fileCopy, named after the original file's path.
private static Attachment createFileTextAttachment(PsiFile fileCopy, final PsiFile originalFile) {
return new Attachment(originalFile.getViewProvider().getVirtualFile().getPath(), fileCopy.getText());
}
/**
 * Asserts that the host-document start offset falls inside the host range of the
 * injected fragment.
 */
static void assertInjectedOffsets(int hostStartOffset, PsiFile injected, DocumentWindow documentWindow) {
assert documentWindow != null : "no DocumentWindow for an injected fragment";
TextRange host = InjectedLanguageManager.getInstance(injected.getProject()).injectedToHost(injected, injected.getTextRange());
assert hostStartOffset >= host.getStartOffset() : "startOffset before injected";
assert hostStartOffset <= host.getEndOffset() : "startOffset after injected";
}
// Asserts the host copy is valid PSI and the completion start offset lies within it.
static void assertHostInfo(PsiFile hostCopy, OffsetMap hostMap) {
PsiUtilCore.ensureValid(hostCopy);
if (hostMap.getOffset(CompletionInitializationContext.START_OFFSET) > hostCopy.getTextLength()) {
throw new AssertionError("startOffset outside the host file: " + hostMap.getOffset(CompletionInitializationContext.START_OFFSET) + "; " + hostCopy);
}
}
/**
 * Asserts that the element found at the completion offset exists and that its text
 * range agrees with the actual text of the file copy; otherwise throws with the file
 * text and AST attached for diagnosis.
 */
@Contract("_,_,_,null->fail")
static void assertCompletionPositionPsiConsistent(OffsetsInFile offsets,
int offset,
PsiFile originalFile, PsiElement insertedElement) {
PsiFile fileCopy = offsets.getFile();
if (insertedElement == null) {
throw new RuntimeExceptionWithAttachments(
"No element at insertion offset",
"offset=" + offset,
createFileTextAttachment(fileCopy, originalFile),
createAstAttachment(fileCopy, originalFile));
}
// The element's own text must match the file text at its claimed range.
final TextRange range = insertedElement.getTextRange();
CharSequence fileCopyText = fileCopy.getViewProvider().getContents();
if ((range.getEndOffset() > fileCopyText.length()) ||
!fileCopyText.subSequence(range.getStartOffset(), range.getEndOffset()).toString().equals(insertedElement.getText())) {
throw new RuntimeExceptionWithAttachments(
"Inconsistent completion tree",
"range=" + range,
createFileTextAttachment(fileCopy, originalFile),
createAstAttachment(fileCopy, originalFile),
new Attachment("Element at caret.txt", insertedElement.getText()));
}
}
// Asserts that `copy` was produced from `file` (its original-file link points back to it).
static void assertCorrectOriginalFile(String prefix, PsiFile file, PsiFile copy) {
if (copy.getOriginalFile() != file) {
throw new AssertionError(prefix + " copied file doesn't have correct original: noOriginal=" + (copy.getOriginalFile() == copy) +
"\n file " + fileInfo(file) +
"\n copy " + fileInfo(copy));
}
}
// One-line description of a PsiFile used in assertion messages.
private static String fileInfo(PsiFile file) {
return file + " of " + file.getClass() +
" in " + file.getViewProvider() + ", languages=" + file.getViewProvider().getLanguages() +
", physical=" + file.isPhysical();
}
/**
 * InsertionContext that watches the tail offset with a greedy range marker and records
 * (via a RangeMarkerSpy) the stack trace and document event of whatever invalidated it,
 * so a later "Tail offset invalid" failure can show its killer.
 */
static class WatchingInsertionContext extends InsertionContext {
// Greedy-to-right marker tracking the current tail offset; replaced on each setTailOffset.
private RangeMarkerEx tailWatcher;
// Stack trace captured at the moment the tail marker was invalidated, if ever.
Throwable invalidateTrace;
// The document event that invalidated the tail marker.
DocumentEvent killer;
private RangeMarkerSpy spy;
public WatchingInsertionContext(OffsetMap offsetMap, PsiFile file, char completionChar, List<LookupElement> items, Editor editor) {
// addCompletionChar is true only for non-selection completion chars.
super(offsetMap, completionChar, items.toArray(LookupElement.EMPTY_ARRAY),
file, editor,
completionChar != Lookup.AUTO_INSERT_SELECT_CHAR && completionChar != Lookup.REPLACE_SELECT_CHAR &&
completionChar != Lookup.NORMAL_SELECT_CHAR);
}
@Override
public void setTailOffset(int offset) {
super.setTailOffset(offset);
watchTail(offset);
}
// Order matters: dispose the old watcher/listener before installing new ones.
private void watchTail(int offset) {
stopWatching();
tailWatcher = (RangeMarkerEx)getDocument().createRangeMarker(offset, offset);
if (!tailWatcher.isValid()) {
throw new AssertionError(getDocument() + "; offset=" + offset);
}
tailWatcher.setGreedyToRight(true);
spy = new RangeMarkerSpy(tailWatcher) {
@Override
protected void invalidated(DocumentEvent e) {
// Keep only the FIRST invalidation; later ones are consequences.
if (invalidateTrace == null) {
invalidateTrace = new Throwable();
killer = e;
}
}
};
getDocument().addDocumentListener(spy);
}
void stopWatching() {
if (tailWatcher != null) {
getDocument().removeDocumentListener(spy);
tailWatcher.dispose();
}
}
@Override
public int getTailOffset() {
if (!getOffsetMap().containsOffset(TAIL_OFFSET) && invalidateTrace != null) {
throw new RuntimeExceptionWithAttachments("Tail offset invalid", new Attachment("invalidated", invalidateTrace));
}
// NOTE(review): tailWatcher can be null if setTailOffset was never called —
// presumably super.getTailOffset() fails first in that case; confirm.
int offset = super.getTailOffset();
if (tailWatcher.getStartOffset() != tailWatcher.getEndOffset() && offset > 0) {
// The greedy marker grew past the recorded offset; re-anchor it.
watchTail(offset);
}
return offset;
}
}
}
| |
package org.motechproject.server.startup;
import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.joda.time.DateTime;
import org.motechproject.commons.api.MotechException;
import org.motechproject.commons.couchdb.service.CouchDbManager;
import org.motechproject.server.config.ConfigLoader;
import org.motechproject.server.config.domain.SettingsRecord;
import org.motechproject.server.config.service.AllSettings;
import org.motechproject.server.config.settings.ConfigFileSettings;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.BundleException;
import org.osgi.service.event.Event;
import org.osgi.service.event.EventAdmin;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.PostConstruct;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.JMSException;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
/**
 * Coordinates MOTECH platform startup: loads configuration, synchronizes it with
 * the settings database, and either broadcasts an OSGi startup event or starts
 * only the admin bundle.
 *
 * <p>Accessed both as a Spring bean (fields are {@code @Autowired}) and as a
 * classic singleton via {@link #getInstance()}.
 */
public final class StartupManager {
    private static StartupManager instance;

    private static final String SETTINGS_DB = "motech-platform-startup";
    private static final String ADMIN_SYMBOLIC_NAME = "org.motechproject.motech-admin-bundle";
    private static final String STARTUP_TOPIC = "org/motechproject/osgi/event/STARTUP";
    private static final Logger LOGGER = LoggerFactory.getLogger(StartupManager.class);

    private MotechPlatformState platformState = MotechPlatformState.STARTUP;
    private ConfigFileSettings configFileSettings;

    @Autowired
    private ConfigLoader configLoader;
    @Autowired
    private CouchDbManager couchDbManager;
    @Autowired
    private EventAdmin eventAdmin;
    @Autowired
    private BundleContext bundleContext;

    private StartupManager() {
    }

    /**
     * Returns the singleton instance.
     *
     * <p>Fix: synchronized so that concurrent first calls cannot observe a
     * partially constructed instance or create two instances.
     */
    public static synchronized StartupManager getInstance() {
        if (instance == null) {
            instance = new StartupManager();
        }
        return instance;
    }

    /** Current lifecycle state of the platform. */
    public MotechPlatformState getPlatformState() {
        return platformState;
    }

    /** Invoked by the container after dependency injection; starts all bundles. */
    @PostConstruct
    public void startup() {
        startup(true);
    }

    /**
     * Loads configuration, syncs it with the database and, when the platform is
     * launchable, either posts the OSGi startup event (all modules) or starts
     * only the admin bundle.
     *
     * @param startAllBundles true to signal all modules to start, false to start only the admin bundle
     */
    public void startup(boolean startAllBundles) {
        // Always reload; the previous value (if any) is simply replaced.
        configFileSettings = configLoader.loadConfig();
        // check if settings were loaded from config locations
        if (configFileSettings == null) {
            platformState = MotechPlatformState.NEED_CONFIG;
            configFileSettings = configLoader.loadDefaultConfig();
        } else {
            LOGGER.info("Loaded config from " + configFileSettings.getFileURL());
            platformState = MotechPlatformState.STARTUP;
        }
        if (platformState != MotechPlatformState.NEED_CONFIG) {
            syncSettingsWithDb();
        }
        if (canLaunchBundles()) {
            if (startAllBundles) {
                // send an OSGi event indicating that the modules can be started
                eventAdmin.postEvent(new Event(STARTUP_TOPIC, (Map) null));
            } else {
                // only start the admin bundle
                startAdmin();
            }
        }
    }

    /** Starts the admin bundle unless it is missing or already active. */
    private void startAdmin() {
        Bundle adminBundle = getAdminBundle();
        if (adminBundle == null) {
            LOGGER.warn("Admin bundle not installed");
        } else if (adminBundle.getState() == Bundle.ACTIVE) {
            LOGGER.warn("Admin bundle already active");
        } else {
            try {
                adminBundle.start();
            } catch (BundleException e) {
                throw new MotechException("Cannot start admin bundle", e);
            }
        }
    }

    /** True when the platform state permits launching modules. */
    public boolean canLaunchBundles() {
        return platformState == MotechPlatformState.FIRST_RUN || platformState == MotechPlatformState.NORMAL_RUN;
    }

    /** The configuration loaded during the last {@link #startup(boolean)} call. */
    public ConfigFileSettings getLoadedConfig() {
        return configFileSettings;
    }

    /**
     * Probes a CouchDB instance by issuing a GET and checking for HTTP 200.
     *
     * @param url the CouchDB root URL to probe
     * @return true when the instance answered with status 200
     */
    public boolean findCouchDBInstance(final String url) {
        HttpClient httpClient = new DefaultHttpClient();
        try {
            HttpGet request = new HttpGet(url);
            request.addHeader("accept", "application/json");
            HttpResponse response = httpClient.execute(request);
            return response.getStatusLine().getStatusCode() == 200;
        } catch (IOException e) {
            return false;
        } finally {
            // Fix: release pooled connections so repeated probes don't leak sockets.
            httpClient.getConnectionManager().shutdown();
        }
    }

    /**
     * Probes an ActiveMQ broker by opening and starting a JMS connection.
     *
     * <p>Fix: success is now recorded immediately after {@code start()} succeeds.
     * Previously the flag was only set in the close path, which (a) reported
     * success even when {@code start()} had thrown, and (b) reported failure
     * when only {@code close()} failed.
     *
     * @param url the broker URL to probe
     * @return true when a connection could be created and started
     */
    public boolean findActiveMQInstance(final String url) {
        Connection connection = null;
        boolean found = false;
        try {
            ConnectionFactory factory = new ActiveMQConnectionFactory(url);
            connection = factory.createConnection();
            connection.start();
            found = true;
        } catch (JMSException e) {
            found = false;
        } finally {
            if (connection != null) {
                try {
                    connection.close();
                } catch (JMSException e) {
                    // A close failure does not change whether the broker was reachable.
                    LOGGER.warn("Failed to close ActiveMQ probe connection", e);
                }
            }
        }
        return found;
    }

    /** Scheduler probing is not implemented; always reports "not found". */
    public boolean findSchedulerInstance(final String url) {
        return false;
    }

    /** Looks up the admin bundle by symbolic name, or null when not installed. */
    private Bundle getAdminBundle() {
        for (Bundle bundle : bundleContext.getBundles()) {
            // Fix: constant-first equals — bundles may report a null symbolic name.
            if (ADMIN_SYMBOLIC_NAME.equals(bundle.getSymbolicName())) {
                return bundle;
            }
        }
        return null;
    }

    /**
     * Synchronizes file-based settings with the CouchDB settings record,
     * updating {@link #platformState} to FIRST_RUN/NORMAL_RUN, or DB_ERROR on failure.
     */
    private void syncSettingsWithDb() {
        try {
            AllSettings allSettings = new AllSettings(couchDbManager.getConnector(SETTINGS_DB));
            SettingsRecord dbSettings = allSettings.getSettings();
            if (dbSettings.getLastRun() == null) {
                platformState = MotechPlatformState.FIRST_RUN;
            } else {
                platformState = MotechPlatformState.NORMAL_RUN;
            }
            // Re-import the config file when this is the first run or the file changed on disk.
            if (platformState == MotechPlatformState.FIRST_RUN ||
                    !Arrays.equals(configFileSettings.getMd5checkSum(), dbSettings.getConfigFileChecksum())) {
                LOGGER.info("Updating database startup");
                dbSettings.updateSettings(configFileSettings);
            }
            dbSettings.setLastRun(DateTime.now());
            dbSettings.setConfigFileChecksum(configFileSettings.getMd5checkSum());
            allSettings.addOrUpdateSettings(dbSettings);
        } catch (Exception e) {
            LOGGER.error(e.getMessage(), e);
            platformState = MotechPlatformState.DB_ERROR;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.state.internals;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.streams.errors.ProcessorStateException;
import org.apache.kafka.streams.kstream.Windowed;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.StateStore;
import org.apache.kafka.streams.processor.internals.ProcessorStateManager;
import org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.SessionStore;
import org.apache.kafka.streams.state.StateSerdes;
import java.util.Map;
import java.util.Objects;
import static org.apache.kafka.common.metrics.Sensor.RecordingLevel.DEBUG;
import static org.apache.kafka.streams.state.internals.metrics.Sensors.createTaskAndStoreLatencyAndThroughputSensors;
public class MeteredSessionStore<K, V>
extends WrappedStateStore<SessionStore<Bytes, byte[]>, Windowed<K>, V>
implements SessionStore<K, V> {
private final String metricScope;
private final Serde<K> keySerde;
private final Serde<V> valueSerde;
private final Time time;
private StateSerdes<K, V> serdes;
private StreamsMetricsImpl metrics;
private Sensor putTime;
private Sensor fetchTime;
private Sensor flushTime;
private Sensor removeTime;
private String taskName;
MeteredSessionStore(final SessionStore<Bytes, byte[]> inner,
final String metricScope,
final Serde<K> keySerde,
final Serde<V> valueSerde,
final Time time) {
super(inner);
this.metricScope = metricScope;
this.keySerde = keySerde;
this.valueSerde = valueSerde;
this.time = time;
}
@SuppressWarnings("unchecked")
@Override
public void init(final ProcessorContext context,
final StateStore root) {
//noinspection unchecked
serdes = new StateSerdes<>(
ProcessorStateManager.storeChangelogTopic(context.applicationId(), name()),
keySerde == null ? (Serde<K>) context.keySerde() : keySerde,
valueSerde == null ? (Serde<V>) context.valueSerde() : valueSerde);
metrics = (StreamsMetricsImpl) context.metrics();
taskName = context.taskId().toString();
final String metricsGroup = "stream-" + metricScope + "-metrics";
final Map<String, String> taskTags = metrics.tagMap("task-id", taskName, metricScope + "-id", "all");
final Map<String, String> storeTags = metrics.tagMap("task-id", taskName, metricScope + "-id", name());
putTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "put", metrics, metricsGroup, taskName, name(), taskTags, storeTags);
fetchTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "fetch", metrics, metricsGroup, taskName, name(), taskTags, storeTags);
flushTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "flush", metrics, metricsGroup, taskName, name(), taskTags, storeTags);
removeTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "remove", metrics, metricsGroup, taskName, name(), taskTags, storeTags);
final Sensor restoreTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "restore", metrics, metricsGroup, taskName, name(), taskTags, storeTags);
// register and possibly restore the state from the logs
final long startNs = time.nanoseconds();
try {
super.init(context, root);
} finally {
metrics.recordLatency(
restoreTime,
startNs,
time.nanoseconds()
);
}
}
@SuppressWarnings("unchecked")
@Override
public boolean setFlushListener(final CacheFlushListener<Windowed<K>, V> listener,
final boolean sendOldValues) {
final SessionStore<Bytes, byte[]> wrapped = wrapped();
if (wrapped instanceof CachedStateStore) {
return ((CachedStateStore<byte[], byte[]>) wrapped).setFlushListener(
(key, newValue, oldValue, timestamp) -> listener.apply(
SessionKeySchema.from(key, serdes.keyDeserializer(), serdes.topic()),
newValue != null ? serdes.valueFrom(newValue) : null,
oldValue != null ? serdes.valueFrom(oldValue) : null,
timestamp
),
sendOldValues);
}
return false;
}
@Override
public void put(final Windowed<K> sessionKey,
final V aggregate) {
Objects.requireNonNull(sessionKey, "sessionKey can't be null");
final long startNs = time.nanoseconds();
try {
final Bytes key = keyBytes(sessionKey.key());
wrapped().put(new Windowed<>(key, sessionKey.window()), serdes.rawValue(aggregate));
} catch (final ProcessorStateException e) {
final String message = String.format(e.getMessage(), sessionKey.key(), aggregate);
throw new ProcessorStateException(message, e);
} finally {
metrics.recordLatency(putTime, startNs, time.nanoseconds());
}
}
@Override
public void remove(final Windowed<K> sessionKey) {
Objects.requireNonNull(sessionKey, "sessionKey can't be null");
final long startNs = time.nanoseconds();
try {
final Bytes key = keyBytes(sessionKey.key());
wrapped().remove(new Windowed<>(key, sessionKey.window()));
} catch (final ProcessorStateException e) {
final String message = String.format(e.getMessage(), sessionKey.key());
throw new ProcessorStateException(message, e);
} finally {
metrics.recordLatency(removeTime, startNs, time.nanoseconds());
}
}
@Override
public V fetchSession(final K key, final long startTime, final long endTime) {
Objects.requireNonNull(key, "key cannot be null");
final Bytes bytesKey = keyBytes(key);
final long startNs = time.nanoseconds();
try {
final byte[] result = wrapped().fetchSession(bytesKey, startTime, endTime);
if (result == null) {
return null;
}
return serdes.valueFrom(result);
} finally {
metrics.recordLatency(flushTime, startNs, time.nanoseconds());
}
}
@Override
public KeyValueIterator<Windowed<K>, V> fetch(final K key) {
Objects.requireNonNull(key, "key cannot be null");
return new MeteredWindowedKeyValueIterator<>(
wrapped().fetch(keyBytes(key)),
fetchTime,
metrics,
serdes,
time);
}
@Override
public KeyValueIterator<Windowed<K>, V> fetch(final K from,
final K to) {
Objects.requireNonNull(from, "from cannot be null");
Objects.requireNonNull(to, "to cannot be null");
return new MeteredWindowedKeyValueIterator<>(
wrapped().fetch(keyBytes(from), keyBytes(to)),
fetchTime,
metrics,
serdes,
time);
}
@Override
public KeyValueIterator<Windowed<K>, V> findSessions(final K key,
final long earliestSessionEndTime,
final long latestSessionStartTime) {
Objects.requireNonNull(key, "key cannot be null");
final Bytes bytesKey = keyBytes(key);
return new MeteredWindowedKeyValueIterator<>(
wrapped().findSessions(
bytesKey,
earliestSessionEndTime,
latestSessionStartTime),
fetchTime,
metrics,
serdes,
time);
}
@Override
public KeyValueIterator<Windowed<K>, V> findSessions(final K keyFrom,
final K keyTo,
final long earliestSessionEndTime,
final long latestSessionStartTime) {
Objects.requireNonNull(keyFrom, "keyFrom cannot be null");
Objects.requireNonNull(keyTo, "keyTo cannot be null");
final Bytes bytesKeyFrom = keyBytes(keyFrom);
final Bytes bytesKeyTo = keyBytes(keyTo);
return new MeteredWindowedKeyValueIterator<>(
wrapped().findSessions(
bytesKeyFrom,
bytesKeyTo,
earliestSessionEndTime,
latestSessionStartTime),
fetchTime,
metrics,
serdes,
time);
}
@Override
public void flush() {
final long startNs = time.nanoseconds();
try {
super.flush();
} finally {
metrics.recordLatency(flushTime, startNs, time.nanoseconds());
}
}
@Override
public void close() {
super.close();
metrics.removeAllStoreLevelSensors(taskName, name());
}
private Bytes keyBytes(final K key) {
return Bytes.wrap(serdes.rawKey(key));
}
}
| |
package scalaSci.math.LinearAlgebra;
import scalaSci.math.array.*;
import Jama.JamaCholeskyDecomposition;
import Jama.JamaEigenvalueDecomposition;
import Jama.JamaLUDecomposition;
import Jama.JamaQRDecomposition;
import Jama.jMatrix;
import Jama.JamaSingularValueDecomposition;
/**
* A collection of static methods for performing math operations on matrices and arrays.
* Advanced Linear Algebra methods (decompositions, norm, ...) are just call for JAMA routines.
*
* @author richet
*/
/**
 * Static element-wise and matrix math on {@code double[]} / {@code double[][]}.
 * Advanced routines (decompositions, norms, solve) delegate to JAMA.
 */
public class LinearAlgebra extends DoubleArray {
    // linear algebra methods

    /**
     * Element-wise subtraction of two arrays. Arrays must be same size.
     *
     * @param v1 Minuend.
     * @param v2 Subtrahend
     * @return Array v1 - v2
     */
    public static double[] minus(double[] v1, double[] v2) {
        checkLength(v2, v1.length);
        double[] array = new double[v1.length];
        for (int i = 0; i < v1.length; i++)
            array[i] = v1[i] - v2[i];
        return array;
    }

    /**
     * Subtracts a scalar value from each element of an array
     *
     * @param v1 Minuend Array.
     * @param v Subtrahend scalar
     * @return Array v1 - v
     */
    public static double[] minus(double[] v1, double v) {
        double[] array = new double[v1.length];
        for (int i = 0; i < v1.length; i++)
            array[i] = v1[i] - v;
        return array;
    }

    /**
     * Subtracts each element of an array from a scalar value.
     *
     * @param v Scalar Minuend
     * @param v1 Subtrahend array
     * @return Array v - v1
     */
    public static double[] minus(double v, double[] v1) {
        double[] array = new double[v1.length];
        for (int i = 0; i < v1.length; i++)
            array[i] = v - v1[i];
        return array;
    }

    /**
     * Element-wise subtraction of two matrices. Matrices must be same size.
     *
     * @param v1 Minuend matrix
     * @param v2 Subtrahend matrix
     * @return Matrix v1 - v2
     */
    public static double[][] minus(double[][] v1, double[][] v2) {
        checkRowDimension(v2, v1.length);
        checkColumnDimension(v2, v1[0].length);
        double[][] array = new double[v1.length][v1[0].length];
        for (int i = 0; i < v1.length; i++)
            for (int j = 0; j < v1[0].length; j++)
                array[i][j] = v1[i][j] - v2[i][j];
        return array;
    }

    /**
     * Subtract a scalar from each element of a matrix.
     *
     * @param v1 Minuend matrix
     * @param v2 Scalar subtrahend
     * @return Matrix v1 - v2
     */
    public static double[][] minus(double[][] v1, double v2) {
        double[][] array = new double[v1.length][v1[0].length];
        for (int i = 0; i < v1.length; i++)
            for (int j = 0; j < v1[0].length; j++)
                array[i][j] = v1[i][j] - v2;
        return array;
    }

    /**
     * Subtract each element of a matrix from a scalar.
     *
     * @param v2 Scalar minuend
     * @param v1 Matrix subtrahend
     * @return Matrix v2 - v1
     */
    public static double[][] minus(double v2, double[][] v1) {
        double[][] array = new double[v1.length][v1[0].length];
        for (int i = 0; i < v1.length; i++)
            for (int j = 0; j < v1[0].length; j++)
                array[i][j] = v2 - v1[i][j];
        return array;
    }

    /**
     * Element-wise sum of any number of arrays. Each array must be of same length.
     *
     * @param v Any number of arrays
     * @return Element-wise sum of input arrays.
     */
    public static double[] plus(double[]... v) {
        double[] array = new double[v[0].length];
        for (int j = 0; j < v.length; j++)
            for (int i = 0; i < v[j].length; i++)
                array[i] += v[j][i];
        return array;
    }

    /**
     * Add a scalar value to each element of an array.
     *
     * @param v1 Array
     * @param v Scalar
     * @return v1 + v
     */
    public static double[] plus(double[] v1, double v) {
        double[] array = new double[v1.length];
        for (int i = 0; i < v1.length; i++)
            array[i] = v1[i] + v;
        return array;
    }

    /**
     * Add each element of an array to a scalar value (commutative overload).
     *
     * @param v Scalar
     * @param v1 Array
     * @return v + v1
     */
    public static double[] plus(double v, double v1[]) {
        double[] array = new double[v1.length];
        for (int i = 0; i < v1.length; i++)
            array[i] = v1[i] + v;
        return array;
    }

    /**
     * Element-wise sum of two matrices
     *
     * @param v1 Matrix
     * @param v2 Matrix
     * @return Matrix v1 + v2
     */
    public static double[][] plus(double[][] v1, double[][] v2) {
        checkRowDimension(v2, v1.length);
        checkColumnDimension(v2, v1[0].length);
        double[][] array = new double[v1.length][v1[0].length];
        for (int i = 0; i < v1.length; i++)
            for (int j = 0; j < v1[0].length; j++)
                array[i][j] = v1[i][j] + v2[i][j];
        return array;
    }

    /**
     * Add a scalar to each element of a matrix.
     *
     * @param v1 Matrix
     * @param v2 Scalar
     * @return Matrix v1 + v2
     */
    public static double[][] plus(double[][] v1, double v2) {
        double[][] array = new double[v1.length][v1[0].length];
        for (int i = 0; i < v1.length; i++)
            for (int j = 0; j < v1[0].length; j++)
                array[i][j] = v1[i][j] + v2;
        return array;
    }

    /**
     * Add each element of a matrix to a scalar (commutative overload).
     *
     * @param v2 Scalar
     * @param v1 Matrix
     * @return Matrix v2 + v1
     */
    public static double[][] plus(double v2, double[][] v1) {
        double[][] array = new double[v1.length][v1[0].length];
        for (int i = 0; i < v1.length; i++)
            for (int j = 0; j < v1[0].length; j++)
                array[i][j] = v1[i][j] + v2;
        return array;
    }

    /**
     * Element-wise product of any number of arrays. Each array must be same size.
     *
     * @param v Any number of arrays.
     * @return Array. i'th element = v1(i)*v2(i)*v3(i)...
     */
    public static double[] times(double[]... v) {
        double[] array = fill(v[0].length, 1.0);
        for (int j = 0; j < v.length; j++)
            for (int i = 0; i < v[j].length; i++)
                array[i] *= v[j][i];
        return array;
    }

    /**
     * Element-wise ratio of two arrays.
     *
     * @param v1 Numerators
     * @param v2 Denominators
     * @return Array. i'th element = v1(i)/v2(i)
     */
    public static double[] divide(double[] v1, double[] v2) {
        // Consistent with minus/plus: validate the second operand against the first.
        checkLength(v2, v1.length);
        double[] array = new double[v1.length];
        for (int i = 0; i < v1.length; i++)
            array[i] = v1[i] / v2[i];
        return array;
    }

    /**
     * Multiply each element of an array by a scalar.
     *
     * @param v1 Array
     * @param v Scalar
     * @return v1 * v
     */
    public static double[] times(double[] v1, double v) {
        double[] array = new double[v1.length];
        for (int i = 0; i < v1.length; i++)
            array[i] = v1[i] * v;
        return array;
    }

    /**
     * Multiply a scalar by each element of an array (commutative overload).
     *
     * @param v Scalar
     * @param v1 Array
     * @return v * v1
     */
    public static double[] times(double v, double[] v1) {
        double[] array = new double[v1.length];
        for (int i = 0; i < v1.length; i++)
            array[i] = v1[i] * v;
        return array;
    }

    /**
     * Multiply each element in a matrix by a scalar
     *
     * @param v1 Matrix
     * @param v Scalar
     * @return v1 * v
     */
    public static double[][] times(double[][] v1, double v) {
        double[][] array = new double[v1.length][v1[0].length];
        for (int i = 0; i < v1.length; i++)
            for (int j = 0; j < v1[i].length; j++)
                array[i][j] = v1[i][j] * v;
        return array;
    }

    /**
     * Multiply a scalar by each element of a matrix (commutative overload).
     *
     * @param v Scalar
     * @param v1 Matrix
     * @return v * v1
     */
    public static double[][] times(double v, double[][] v1) {
        double[][] array = new double[v1.length][v1[0].length];
        for (int i = 0; i < v1.length; i++)
            for (int j = 0; j < v1[i].length; j++)
                array[i][j] = v1[i][j] * v;
        return array;
    }

    /**
     * Divide each element of an array by a scalar.
     *
     * @param v1 Numerator Array
     * @param v Scalar denominator
     * @return Array. i'th element is v1(i)/v
     */
    public static double[] divide(double[] v1, double v) {
        double[] array = new double[v1.length];
        for (int i = 0; i < v1.length; i++)
            array[i] = v1[i] / v;
        return array;
    }

    /**
     * Divide each element of a matrix by a scalar
     *
     * @param v1 Matrix numerator
     * @param v Scalar denominator
     * @return Matrix v1 / v
     */
    public static double[][] divide(double[][] v1, double v) {
        double[][] array = new double[v1.length][v1[0].length];
        for (int i = 0; i < v1.length; i++)
            for (int j = 0; j < v1[i].length; j++)
                array[i][j] = v1[i][j] / v;
        return array;
    }

    /**
     * Raise each element of an array to a scalar power.
     *
     * @param v1 Array
     * @param n Scalar exponent
     * @return Array. i'th element is v1(i)^n
     */
    public static double[] raise(double[] v1, double n) {
        double[] array = new double[v1.length];
        for (int i = 0; i < v1.length; i++)
            array[i] = Math.pow(v1[i], n);
        return array;
    }

    /**
     * Raise each element of a matrix to a scalar power.
     *
     * @param v Matrix
     * @param n Scalar exponent
     * @return Matrix. (i,j) element is v(i,j)^n
     */
    public static double[][] raise(double[][] v, double n) {
        double[][] array = new double[v.length][v[0].length];
        for (int i = 0; i < v.length; i++)
            for (int j = 0; j < v[i].length; j++)
                array[i][j] = Math.pow(v[i][j], n);
        return array;
    }

    /**
     * Matrix product v1 * v2.
     *
     * @param v1 Left matrix (m x n)
     * @param v2 Right matrix (n x p)
     * @return Product matrix (m x p)
     */
    public static double[][] times(double[][] v1, double[][] v2) {
        // Fix: validate inner dimensions like the other binary ops do,
        // instead of failing later with a confusing index error.
        checkRowDimension(v2, v1[0].length);
        int v1Rows = v1.length;       // # rows of the result matrix
        int v2Cols = v2[0].length;    // # cols of the result matrix
        double[][] result = new double[v1Rows][v2Cols];
        int v1Cols = v1[0].length;
        double[] v2Colj = new double[v1Cols]; // cache of v2's j'th column for locality
        for (int j = 0; j < v2Cols; j++) {
            for (int k = 0; k < v1Cols; k++) {
                v2Colj[k] = v2[k][j];
            }
            for (int i = 0; i < v1Rows; i++) {
                double[] Arowi = v1[i];
                double s = 0;
                for (int k = 0; k < v1Cols; k++) {
                    s += Arowi[k] * v2Colj[k];
                }
                result[i][j] = s;
            }
        }
        return result;
    }

    /**
     * Matrix-vector product v1 * v2.
     *
     * @param v1 Matrix (m x n)
     * @param v2 Vector of length n
     * @return Vector of length m
     */
    public static double[] times(double[][] v1, double[] v2) {
        checkLength(v2, v1[0].length);
        return getColumnCopy(times(v1, columnVector(v2)), 0);
    }

    // Now follows JAMA methods calls //

    /** Solves v2 * X = v1 via LU decomposition of v2. */
    public static double[][] divideLU(double[][] v1, double[]... v2) {
        return LU(v2).solve(jMatrix.constructWithCopy(v1)).getArray();
    }

    /** Solves v2 * X = v1 via QR decomposition of v2 (least squares). */
    public static double[][] divideQR(double[][] v1, double[]... v2) {
        return QR(v2).solve(jMatrix.constructWithCopy(v1)).getArray();
    }

    /** Solves v2 * X = v1; delegates to the QR-based solver. */
    public static double[][] divide(double[][] v1, double[]... v2) {
        return divideQR(v1, v2);
    }

    /** Inverse of a square matrix via LU decomposition. */
    public static double[][] inverseLU(double[][] v1) {
        checkColumnDimension(v1, v1.length);
        return LU(v1).solve(jMatrix.identity(v1.length, v1.length)).getArray();
    }

    /** Inverse of a square matrix via QR decomposition. */
    public static double[][] inverseQR(double[][] v1) {
        checkColumnDimension(v1, v1.length);
        return QR(v1).solve(jMatrix.identity(v1.length, v1.length)).getArray();
    }

    /** Inverse via JAMA's default algorithm. */
    public static double[][] inverse(double[][] v1) {
        return new jMatrix(v1).inverse().getArray();
    }

    /** Solves A * X = B. */
    public static double[][] solve(double[][] A, double[][] B) {
        return new jMatrix(A).solve(new jMatrix(B)).getArray();
    }

    /** Eigenvalue decomposition of v. */
    public static Jama.JamaEigenvalueDecomposition eigen(double[][] v) {
        return new Jama.JamaEigenvalueDecomposition(v);
    }

    /** QR decomposition of a copy of v. */
    public static Jama.JamaQRDecomposition QR(double[][] v) {
        return new Jama.JamaQRDecomposition(jMatrix.constructWithCopy(v));
    }

    /** LU decomposition of a copy of v. */
    public static Jama.JamaLUDecomposition LU(double[][] v) {
        return new Jama.JamaLUDecomposition(jMatrix.constructWithCopy(v));
    }

    /** Cholesky decomposition of v. */
    public static Jama.JamaCholeskyDecomposition cholesky(double[][] v) {
        return new Jama.JamaCholeskyDecomposition(new jMatrix(v));
    }

    /** Singular value decomposition of v. */
    public static Jama.JamaSingularValueDecomposition singular(double[][] v) {
        return new Jama.JamaSingularValueDecomposition(new jMatrix(v));
    }

    /** Condition number (ratio of largest to smallest singular value). */
    public static double cond(double[][] v) {
        return new jMatrix(v).cond();
    }

    /** Determinant. */
    public static double det(double[][] v) {
        return new jMatrix(v).det();
    }

    /** Effective numerical rank. */
    public static int rank(double[][] v) {
        return new jMatrix(v).rank();
    }

    /** Sum of the diagonal elements. */
    public static double trace(double[][] v) {
        return new jMatrix(v).trace();
    }

    /** One norm: maximum column sum. */
    public static double norm1(double[][] v) {
        return new jMatrix(v).norm1();
    }

    /** Two norm: maximum singular value. */
    public static double norm2(double[][] v) {
        return new jMatrix(v).norm2();
    }

    /** Frobenius norm: sqrt of sum of squares of all elements. */
    public static double normF(double[][] v) {
        return new jMatrix(v).normF();
    }

    /** Infinity norm: maximum row sum. */
    public static double normInf(double[][] v) {
        return new jMatrix(v).normInf();
    }
}
| |
/*
* Copyright (C) 2013 OTAPlatform
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.oct.updater.manager;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.NotificationManager;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.AsyncTask.Status;
import android.os.Environment;
import android.os.StatFs;
import android.text.TextUtils;
import android.widget.EditText;
import android.widget.Toast;
import com.oct.updater.DownloadService;
import com.oct.updater.R;
import com.oct.updater.updater.CancelPackage;
import com.oct.updater.updater.TWRPUpdater;
import com.oct.updater.updater.Updater;
import com.oct.updater.updater.Updater.PackageInfo;
import com.oct.updater.util.Constants;
import com.oct.updater.util.DownloadTask.DownloadStatus;
import com.oct.updater.util.FileItem;
public class FileManager extends Manager {
private List<FileItem> mItems;
private String mInternalStoragePath;
private String mExternalStoragePath;
private int mSelectedBackup = -1;
// Package-visible: instances are obtained through ManagerFactory, not constructed directly.
protected FileManager(Context context) {
super(context);
// Rebuild the flash queue from persisted preferences.
calculateItems();
// readMounts() is defined elsewhere in this class; presumably it resolves the
// internal/external storage paths used below — TODO confirm.
readMounts();
}
/**
 * Returns the free space on external storage, in binary gigabytes
 * (1 GiB = 1,073,741,824 bytes).
 */
public double getSpaceLeft() {
    StatFs stats = new StatFs(Environment.getExternalStorageDirectory().getPath());
    double freeBytes = (double) stats.getAvailableBlocks() * (double) stats.getBlockSize();
    // One binary gigabyte equals 1,073,741,824 bytes.
    return freeBytes / 1073741824;
}
// Mount point of the internal storage, as resolved by readMounts() — TODO confirm.
public String getInternalStoragePath() {
return mInternalStoragePath;
}
// Mount point of the external SD card; may be null when no external storage is mounted
// (see the null handling in addItem) — TODO confirm.
public String getExternalStoragePath() {
return mExternalStoragePath;
}
// Removes an item from the in-memory queue and from the persisted flash queue.
public void removeItem(FileItem item) {
mItems.remove(item);
ManagerFactory.getPreferencesManager(mContext).removeFlashQueue(item.toString());
}
/**
 * Returns the queued flash items, lazily rebuilding the list from persisted
 * preferences when it has been cleared.
 */
public List<FileItem> getFileItems() {
    if (mItems.isEmpty()) {
        calculateItems();
    }
    return mItems;
}
// Empties the in-memory queue only; the persisted flash queue is untouched,
// so getFileItems() will repopulate the list on its next call.
public void clearItems() {
mItems.clear();
}
/**
 * Rebuilds {@code mItems} from the persisted flash queue, silently dropping
 * entries whose backing files no longer exist on disk.
 */
private void calculateItems() {
    mItems = new ArrayList<FileItem>();
    for (String entry : ManagerFactory.getPreferencesManager(mContext).getFlashQueue()) {
        FileItem candidate = new FileItem(entry);
        if (new File(candidate.getPath()).exists()) {
            mItems.add(candidate);
        }
    }
}
/**
 * Handles an intent delivered by DownloadService / a notification tap.
 *
 * @return the package to download (when a "new version" notification was tapped),
 *         a {@link CancelPackage} marker when an in-progress download was cancelled,
 *         or null when there is nothing for the caller to act on.
 */
public Updater.PackageInfo onNewIntent(Context context, Intent intent) {
    DownloadService.FileInfo fileInfo = (DownloadService.FileInfo) (intent.getExtras() == null ? null
            : intent.getExtras().get(Constants.FILE_INFO));
    // Fix: intents without attached file info previously fell through to a
    // NullPointerException on the dereference below.
    if (fileInfo == null) {
        return null;
    }
    int notificationId = fileInfo.notificationId;
    if (notificationId == Constants.NEWROMVERSION_NOTIFICATION_ID
            || notificationId == Constants.NEWGAPPSVERSION_NOTIFICATION_ID) {
        // "New version available" notification tapped: clear it and return the
        // package info so the caller can start the download.
        PackageInfo info = fileInfo.packageInfo;
        NotificationManager nMgr = (NotificationManager) context
                .getSystemService(Context.NOTIFICATION_SERVICE);
        nMgr.cancel(notificationId);
        // (The original reassigned notificationId to the DOWNLOAD* id here,
        //  but the value was never used again — dead store removed.)
        return info;
    } else if (notificationId == Constants.DOWNLOADROM_NOTIFICATION_ID
            || notificationId == Constants.DOWNLOADGAPPS_NOTIFICATION_ID
            || notificationId == Constants.DOWNLOADTWRP_NOTIFICATION_ID) {
        switch (notificationId) {
            case Constants.DOWNLOADROM_NOTIFICATION_ID:
            case Constants.DOWNLOADGAPPS_NOTIFICATION_ID:
                // Completed ROM/GApps download: queue the zip for flashing.
                if (fileInfo.status == Status.FINISHED && fileInfo.downloadStatus == DownloadStatus.FINISHED) {
                    if (addItem(fileInfo.path)) {
                        Toast.makeText(context, R.string.install_file_manager_zip_added,
                                Toast.LENGTH_LONG).show();
                    }
                    NotificationManager nMgr = (NotificationManager) context
                            .getSystemService(Context.NOTIFICATION_SERVICE);
                    nMgr.cancel(notificationId);
                    return null;
                }
                break;
            case Constants.DOWNLOADTWRP_NOTIFICATION_ID:
                // Completed recovery download: install it right away.
                if (fileInfo.status == Status.FINISHED && fileInfo.downloadStatus == DownloadStatus.FINISHED) {
                    new TWRPUpdater(context, null).installTWRP(fileInfo.file, fileInfo.md5);
                }
                break;
        }
        // Any unfinished download reaching this point is treated as a cancellation.
        if (fileInfo.status != Status.FINISHED && fileInfo.downloadStatus != DownloadStatus.FINISHED) {
            cancelDownload(context, notificationId, fileInfo);
            return new CancelPackage();
        }
    }
    return null;
}
/**
 * Adds a zip file to the flash queue.
 *
 * <p>The raw path ({@code sdcardPath}) is kept for filesystem access, while
 * {@code filePath} is rewritten to a recovery-relative key by substituting the
 * device mount point with the configured internal/external storage prefix.
 * Duplicate keys replace the existing entry.
 *
 * @param filePath absolute path to a .zip file on internal or external storage
 * @return true when the file was queued, false when invalid or missing
 */
public boolean addItem(String filePath) {
// Only zip files can be flashed.
if (filePath == null || !filePath.endsWith(".zip")) {
Toast.makeText(mContext, R.string.install_file_manager_invalid_zip, Toast.LENGTH_SHORT)
.show();
return false;
}
PreferencesManager pManager = ManagerFactory.getPreferencesManager(mContext);
// Keep the unmodified path for the File existence check and the display name.
// NOTE(review): new String(filePath) is a redundant copy — String is immutable.
String sdcardPath = new String(filePath);
String internalStorage = pManager.getInternalStorage();
String externalStorage = pManager.getExternalStorage();
// Candidate mount-point aliases; index i of internalNames pairs with index i of
// externalNames. mExternalStoragePath may be null, hence the " " placeholder.
String[] internalNames = new String[] { mInternalStoragePath, "/mnt/sdcard", "/sdcard" };
String[] externalNames = new String[] {
mExternalStoragePath == null ? " " : mExternalStoragePath,
"/mnt/extSdCard",
"/extSdCard" };
// Rewrite the first matching alias prefix into the configured storage name.
// NOTE(review): filePath may be rewritten repeatedly across iterations if the
// replacement itself matches a later alias — presumably intentional; confirm.
for (int i = 0; i < internalNames.length; i++) {
String internalName = internalNames[i];
String externalName = externalNames[i];
boolean external = isExternalStorage(filePath);
if (external) {
if (filePath.startsWith(externalName)) {
filePath = filePath.replace(externalName, "/" + externalStorage);
}
} else {
if (filePath.startsWith(internalName)) {
filePath = filePath.replace(internalName, "/" + internalStorage);
}
}
}
File file = new File(sdcardPath);
if (!file.exists()) {
Toast.makeText(mContext, R.string.install_file_manager_not_found_zip, Toast.LENGTH_LONG)
.show();
return false;
} else {
// Replace any queued entry with the same recovery-relative key.
for (FileItem item : mItems) {
if (item.getKey().equals(filePath)) {
mItems.remove(item);
break;
}
}
FileItem item = new FileItem(filePath,
sdcardPath.substring(sdcardPath.lastIndexOf("/") + 1), sdcardPath);
mItems.add(item);
pManager.addFlashQueue(item.toString());
}
return true;
}
/**
 * Shows a single-choice dialog listing recovery backups; on OK, deletes the
 * selected backup folder on a background thread behind an indeterminate
 * progress dialog.
 */
public void showDeleteDialog(final Context context) {
AlertDialog.Builder alert = new AlertDialog.Builder(context);
alert.setTitle(R.string.alert_delete_title);
final String backupFolder = ManagerFactory.getRecoveryManager(context).getBackupDir(true);
final String[] backups = ManagerFactory.getRecoveryManager(context).getBackupList();
// Preselect the first backup, or nothing when the list is empty.
mSelectedBackup = backups.length > 0 ? 0 : -1;
alert.setSingleChoiceItems(backups, mSelectedBackup, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
mSelectedBackup = which;
}
});
alert.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int whichButton) {
dialog.dismiss();
if (mSelectedBackup >= 0) {
final String toDelete = backupFolder + backups[mSelectedBackup];
final ProgressDialog pDialog = new ProgressDialog(context);
pDialog.setIndeterminate(true);
pDialog.setMessage(context.getResources().getString(
R.string.alert_deleting_folder,
new Object[] { backups[mSelectedBackup] }));
pDialog.setCancelable(false);
pDialog.setCanceledOnTouchOutside(false);
pDialog.show();
// Delete off the UI thread; recursiveDelete is defined elsewhere in this class.
// NOTE(review): pDialog.dismiss() is called from this worker thread rather than
// the UI thread — confirm this is safe on the supported Android versions.
(new Thread() {
public void run() {
recursiveDelete(new File(toDelete));
pDialog.dismiss();
}
}).start();
}
}
});
alert.setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
});
alert.show();
}
/**
 * Prompts the user for a download directory with a text-input dialog and
 * stores the entered value in the preferences. Values that are blank or do
 * not start with '/' are rejected with a toast and left unsaved.
 *
 * @param activity activity used as dialog parent and preferences context
 */
public void selectDownloadPath(final Activity activity) {
    final EditText input = new EditText(activity);
    // Pre-fill with the currently configured download path.
    input.setText(ManagerFactory.getPreferencesManager(activity).getDownloadPath());
    new AlertDialog.Builder(activity)
            .setTitle(R.string.download_alert_title)
            .setMessage(R.string.download_alert_summary)
            .setView(input)
            .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int whichButton) {
                    String value = input.getText().toString();
                    // Only accept absolute, non-empty paths.
                    if (value == null || "".equals(value.trim()) || !value.startsWith("/")) {
                        Toast.makeText(activity, R.string.download_alert_error,
                                Toast.LENGTH_SHORT).show();
                        dialog.dismiss();
                        return;
                    }
                    ManagerFactory.getPreferencesManager(activity).setDownloadPath(value);
                    dialog.dismiss();
                }
            })
            .setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int whichButton) {
                    dialog.dismiss();
                }
            }).show();
}
/**
 * Starts {@code DownloadService} to fetch a file, packaging all download
 * metadata into a {@code DownloadService.FileInfo} extra on the service
 * intent.
 *
 * @param context context used to start the service
 * @param url remote URL to download
 * @param fileName local file name for the download
 * @param md5 expected MD5 checksum of the file
 * @param isDelta whether the file is a delta (incremental) update
 * @param notificationId id under which download progress is notified
 */
public void download(Context context, String url, String fileName, String md5, boolean isDelta,
        int notificationId) {
    Intent intent = new Intent(context, DownloadService.class);
    DownloadService.FileInfo info = new DownloadService.FileInfo();
    info.notificationId = notificationId;
    info.url = url;
    info.fileName = fileName;
    info.md5 = md5;
    info.isDelta = isDelta;
    intent.putExtra(Constants.FILE_INFO, info);
    context.startService(intent);
}
/**
 * Asks the user to confirm cancelling a download and, on confirmation,
 * removes its notification and stops {@code DownloadService}. For the known
 * notification ids (ROM, GApps, TWRP) a download that already finished is
 * ignored.
 *
 * @param context context for the dialog, notification manager and service
 * @param notificationId id of the download notification to cancel
 * @param fileInfo info of the download whose status is checked
 */
public void cancelDownload(final Context context, final int notificationId,
        DownloadService.FileInfo fileInfo) {
    switch (notificationId) {
        case Constants.DOWNLOADROM_NOTIFICATION_ID:
        case Constants.DOWNLOADGAPPS_NOTIFICATION_ID:
        case Constants.DOWNLOADTWRP_NOTIFICATION_ID:
            // Nothing to cancel once the download has completed.
            if (fileInfo.status == Status.FINISHED) {
                return;
            }
            break;
    }
    new AlertDialog.Builder(context)
            .setTitle(R.string.download_cancel_title)
            .setMessage(R.string.download_cancel_message)
            .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int whichButton) {
                    dialog.dismiss();
                    NotificationManager nMgr = (NotificationManager) context
                            .getSystemService(Context.NOTIFICATION_SERVICE);
                    nMgr.cancel(notificationId);
                    // NOTE(review): stopping the service stops all downloads
                    // it is handling, not just this one — confirm intended.
                    Intent intent = new Intent(context, DownloadService.class);
                    context.stopService(intent);
                }
            })
            .setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int whichButton) {
                    dialog.dismiss();
                }
            }).show();
}
/**
 * Recursively deletes a file or a whole directory tree.
 *
 * <p>Fixes two defects of the previous version: a {@code SecurityException}
 * (or a {@code null} from {@link File#listFiles()}) used to be swallowed and
 * the method still returned {@code true}, reporting success for a deletion
 * that did not happen.
 *
 * @param f file or directory to delete
 * @return true if {@code f} and all of its children were deleted,
 *         false otherwise
 */
public boolean recursiveDelete(File f) {
    try {
        if (f.isDirectory()) {
            File[] files = f.listFiles();
            // listFiles() returns null on I/O error or when f is unreadable;
            // treat that as a failed deletion rather than crashing.
            if (files == null) {
                return false;
            }
            for (File child : files) {
                if (!recursiveDelete(child)) {
                    return false;
                }
            }
        }
        // Delete the (now empty) directory, or the plain file itself.
        return f.delete();
    } catch (SecurityException e) {
        // A denied delete is a failure, not a success.
        return false;
    }
}
/**
 * Writes the given string to {@code path/fileName}, creating the target
 * directory if needed. Any previous content of the file is overwritten.
 *
 * @param data text to write, encoded with the platform default charset
 * @param path directory that should contain the file
 * @param fileName name of the file to create or overwrite
 * @return true if the data was written successfully, false on I/O error
 */
public boolean writeToFile(String data, String path, String fileName) {
    File folder = new File(path);
    File file = new File(folder, fileName);
    // Best effort: mkdirs() returns false when the directory already exists,
    // so its return value alone is not an error indicator.
    folder.mkdirs();
    // try-with-resources guarantees the stream is closed and no longer hides
    // close() failures in an empty catch block.
    try (FileOutputStream fos = new FileOutputStream(file)) {
        // NOTE(review): keeps the original platform-default charset —
        // consider StandardCharsets.UTF_8 if callers expect UTF-8.
        fos.write(data.getBytes());
        return true;
    } catch (IOException ex) {
        ex.printStackTrace();
        return false;
    }
}
/**
 * Reads the entire contents of an asset file into a String.
 *
 * @param contex context whose AssetManager is used to open the asset
 * @param fileName name of the asset to read
 * @return the asset contents, or null if the asset is empty or unreadable
 */
public String readAssets(Context contex, String fileName) {
    StringBuilder content = null;
    BufferedReader reader = null;
    try {
        content = new StringBuilder(2048);
        reader = new BufferedReader(new InputStreamReader(contex.getAssets().open(fileName)));
        char[] chunk = new char[2048];
        // Accumulate the asset in 2 KiB chunks until EOF.
        for (int read = reader.read(chunk); read != -1; read = reader.read(chunk)) {
            content.append(chunk, 0, read);
        }
    } catch (IOException ex) {
        ex.printStackTrace();
        return null;
    } finally {
        if (reader != null) {
            try {
                reader.close();
            } catch (IOException e) {
            }
        }
    }
    // Empty assets are reported as null, matching the error path.
    return TextUtils.isEmpty(content) ? null : content.toString();
}
/**
 * Returns true when the primary external storage is currently mounted with
 * read/write access ({@code Environment.MEDIA_MOUNTED}).
 */
public boolean hasExternalStorage() {
    return Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState());
}
/**
 * Decides whether a path lives on external (removable) storage, i.e. it is
 * neither under the detected internal storage path nor under one of the
 * well-known internal sdcard mount points.
 *
 * @param path absolute path to classify
 * @return true when the path is not on internal storage
 */
public boolean isExternalStorage(String path) {
    boolean internal = path.startsWith(mInternalStoragePath)
            || path.startsWith("/sdcard")
            || path.startsWith("/mnt/sdcard");
    return !internal;
}
/**
 * Detects the internal and external storage mount points and stores them in
 * {@code mInternalStoragePath} / {@code mExternalStoragePath}. Mounted vold
 * volumes from /proc/mounts are cross-checked against the volumes configured
 * in /system/etc/vold.fstab; volumes that are not writable directories are
 * discarded. Falls back to "/sdcard" when no internal path is found.
 *
 * <p>Fixes a resource leak: both {@link Scanner}s were previously never
 * closed.
 */
private void readMounts() {
    ArrayList<String> mounts = new ArrayList<String>();
    ArrayList<String> vold = new ArrayList<String>();
    // Collect the currently mounted vold volumes.
    try (Scanner scanner = new Scanner(new File("/proc/mounts"))) {
        while (scanner.hasNext()) {
            String line = scanner.nextLine();
            if (line.startsWith("/dev/block/vold/")) {
                String[] lineElements = line.split(" ");
                // The second field of a mounts line is the mount point.
                String element = lineElements[1];
                mounts.add(element);
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    if (mounts.size() == 0 || (mounts.size() == 1 && hasExternalStorage())) {
        mounts.add("/mnt/sdcard");
    }
    // Collect the volumes the system is configured to mount, skipping USB.
    try (Scanner scanner = new Scanner(new File("/system/etc/vold.fstab"))) {
        while (scanner.hasNext()) {
            String line = scanner.nextLine();
            if (line.startsWith("dev_mount")) {
                String[] lineElements = line.split(" ");
                String element = lineElements[2];
                // Strip any ":<partition>" suffix from the mount point.
                if (element.contains(":")) {
                    element = element.substring(0, element.indexOf(":"));
                }
                if (element.toLowerCase().indexOf("usb") < 0) {
                    vold.add(element);
                }
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    if (vold.size() == 0 || (vold.size() == 1 && hasExternalStorage())) {
        vold.add("/mnt/sdcard");
    }
    // Keep only mount points that are configured in vold and usable
    // (existing, writable directories).
    for (int i = 0; i < mounts.size(); i++) {
        String mount = mounts.get(i);
        File root = new File(mount);
        if (!vold.contains(mount)
                || (!root.exists() || !root.isDirectory() || !root.canWrite())) {
            mounts.remove(i--);
        }
    }
    // Classify the remaining mount points as internal or external storage.
    for (int i = 0; i < mounts.size(); i++) {
        String mount = mounts.get(i);
        if (mount.indexOf("sdcard0") >= 0 || mount.equalsIgnoreCase("/mnt/sdcard")
                || mount.equalsIgnoreCase("/sdcard")) {
            mInternalStoragePath = mount;
        } else {
            mExternalStoragePath = mount;
        }
    }
    if (mInternalStoragePath == null) {
        mInternalStoragePath = "/sdcard";
    }
}
/**
 * Rewrites the zip archive {@code inFile} into {@code outFile}, dropping any
 * entries whose name matches one of {@code files} and then appending each of
 * {@code files} (with the {@code folder} prefix stripped from its absolute
 * path to form the entry name). On success the original archive is deleted.
 *
 * <p>Fixes a data-loss bug: {@code inFile} used to be deleted in a
 * {@code finally} block, destroying the source archive even when writing the
 * new archive had failed. It is now deleted only after the copy succeeds.
 *
 * @param inFile existing zip archive to copy entries from
 * @param outFile destination zip archive to create
 * @param files files to add to (or replace in) the archive
 * @param folder path prefix removed from each file's absolute path to form
 *        its zip entry name
 * @throws IOException if reading or writing either archive fails
 */
public void addFilesToZip(File inFile, File outFile, File[] files, String folder) throws IOException {
    byte[] buf = new byte[1024];
    try (ZipOutputStream out = new ZipOutputStream(new FileOutputStream(outFile))) {
        copyUnreplacedEntries(inFile, out, files, buf);
        for (File toAdd : files) {
            appendFile(out, toAdd, folder, buf);
        }
    }
    // Only remove the source archive once the new one was written in full.
    inFile.delete();
}

/** Copies every entry of inFile not shadowed by one of {@code files} into out. */
private void copyUnreplacedEntries(File inFile, ZipOutputStream out, File[] files, byte[] buf)
        throws IOException {
    try (ZipInputStream in = new ZipInputStream(new FileInputStream(inFile))) {
        for (ZipEntry entry = in.getNextEntry(); entry != null; entry = in.getNextEntry()) {
            String name = entry.getName();
            boolean alreadyAdded = false;
            for (File f : files) {
                if (f.getName().equals(name)) {
                    alreadyAdded = true;
                    break;
                }
            }
            if (!alreadyAdded) {
                out.putNextEntry(new ZipEntry(name));
                int len;
                while ((len = in.read(buf)) > 0) {
                    out.write(buf, 0, len);
                }
            }
        }
    }
}

/** Writes a single file into the archive under its folder-relative name. */
private void appendFile(ZipOutputStream out, File file, String folder, byte[] buf)
        throws IOException {
    try (InputStream fin = new FileInputStream(file)) {
        String entryName = file.getAbsolutePath().replace(folder, "");
        out.putNextEntry(new ZipEntry(entryName));
        int len;
        while ((len = fin.read(buf)) > 0) {
            out.write(buf, 0, len);
        }
        out.closeEntry();
    }
}
/**
 * Copies everything from {@code in} to {@code out}, then closes both
 * streams.
 *
 * <p>Fixes a leak in the previous version: if {@code in.close()} threw,
 * {@code out} was never closed. try-with-resources now closes both streams
 * even when copying or one of the closes fails.
 *
 * @param in source stream; always closed on return
 * @param out destination stream; always closed on return
 * @throws IOException if reading, writing or closing fails
 */
public void read(InputStream in, OutputStream out) throws IOException {
    try (InputStream source = in; OutputStream sink = out) {
        byte[] buf = new byte[1024];
        int len;
        while ((len = source.read(buf, 0, buf.length)) != -1) {
            sink.write(buf, 0, len);
        }
    }
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Request for the Kinesis Analytics {@code UntagResource} operation,
 * carrying the ARN of the application and the keys of the tags to remove.
 * Auto-generated by the AWS Java SDK code generator — do not hand-edit the
 * logic.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kinesisanalytics-2015-08-14/UntagResource" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UntagResourceRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The ARN of the Kinesis Analytics application from which to remove the tags.
     * </p>
     */
    private String resourceARN;
    /**
     * <p>
     * A list of keys of tags to remove from the specified application.
     * </p>
     */
    private java.util.List<String> tagKeys;

    /**
     * <p>
     * The ARN of the Kinesis Analytics application from which to remove the tags.
     * </p>
     *
     * @param resourceARN
     *        The ARN of the Kinesis Analytics application from which to remove the tags.
     */
    public void setResourceARN(String resourceARN) {
        this.resourceARN = resourceARN;
    }

    /**
     * <p>
     * The ARN of the Kinesis Analytics application from which to remove the tags.
     * </p>
     *
     * @return The ARN of the Kinesis Analytics application from which to remove the tags.
     */
    public String getResourceARN() {
        return this.resourceARN;
    }

    /**
     * <p>
     * The ARN of the Kinesis Analytics application from which to remove the tags.
     * </p>
     *
     * @param resourceARN
     *        The ARN of the Kinesis Analytics application from which to remove the tags.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UntagResourceRequest withResourceARN(String resourceARN) {
        setResourceARN(resourceARN);
        return this;
    }

    /**
     * <p>
     * A list of keys of tags to remove from the specified application.
     * </p>
     *
     * @return A list of keys of tags to remove from the specified application.
     */
    public java.util.List<String> getTagKeys() {
        return tagKeys;
    }

    /**
     * <p>
     * A list of keys of tags to remove from the specified application.
     * </p>
     *
     * @param tagKeys
     *        A list of keys of tags to remove from the specified application.
     */
    public void setTagKeys(java.util.Collection<String> tagKeys) {
        if (tagKeys == null) {
            this.tagKeys = null;
            return;
        }
        // Defensive copy so later mutation of the caller's collection does
        // not affect this request.
        this.tagKeys = new java.util.ArrayList<String>(tagKeys);
    }

    /**
     * <p>
     * A list of keys of tags to remove from the specified application.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setTagKeys(java.util.Collection)} or {@link #withTagKeys(java.util.Collection)} if you want to override
     * the existing values.
     * </p>
     *
     * @param tagKeys
     *        A list of keys of tags to remove from the specified application.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UntagResourceRequest withTagKeys(String... tagKeys) {
        if (this.tagKeys == null) {
            setTagKeys(new java.util.ArrayList<String>(tagKeys.length));
        }
        for (String ele : tagKeys) {
            this.tagKeys.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * A list of keys of tags to remove from the specified application.
     * </p>
     *
     * @param tagKeys
     *        A list of keys of tags to remove from the specified application.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UntagResourceRequest withTagKeys(java.util.Collection<String> tagKeys) {
        setTagKeys(tagKeys);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getResourceARN() != null)
            sb.append("ResourceARN: ").append(getResourceARN()).append(",");
        if (getTagKeys() != null)
            sb.append("TagKeys: ").append(getTagKeys());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof UntagResourceRequest == false)
            return false;
        UntagResourceRequest other = (UntagResourceRequest) obj;
        // XOR null-checks: fields differ when exactly one side is null.
        if (other.getResourceARN() == null ^ this.getResourceARN() == null)
            return false;
        if (other.getResourceARN() != null && other.getResourceARN().equals(this.getResourceARN()) == false)
            return false;
        if (other.getTagKeys() == null ^ this.getTagKeys() == null)
            return false;
        if (other.getTagKeys() != null && other.getTagKeys().equals(this.getTagKeys()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getResourceARN() == null) ? 0 : getResourceARN().hashCode());
        hashCode = prime * hashCode + ((getTagKeys() == null) ? 0 : getTagKeys().hashCode());
        return hashCode;
    }

    @Override
    public UntagResourceRequest clone() {
        return (UntagResourceRequest) super.clone();
    }
}
| |
package org.spongycastle.pqc.crypto.rainbow.util;
/**
 * Utility routines for the Rainbow signature scheme: conversions between
 * byte arrays and the int/short representations used while encoding and
 * decoding, plus deep comparisons of short arrays of several dimensions.
 */
public class RainbowUtil
{
    /**
     * Converts a one-dimensional byte array into a one-dimensional int
     * array, masking each byte to its unsigned GF2 field value.
     *
     * @param in the array to be converted
     * @return the int array that corresponds to the input
     */
    public static int[] convertArraytoInt(byte[] in)
    {
        int length = in.length;
        int[] converted = new int[length];
        int idx = 0;
        while (idx < length)
        {
            converted[idx] = in[idx] & GF2Field.MASK;
            idx++;
        }
        return converted;
    }

    /**
     * Converts a one-dimensional byte array into a one-dimensional short
     * array, masking each byte to its unsigned GF2 field value.
     *
     * @param in the array to be converted
     * @return the short array that corresponds to the input
     */
    public static short[] convertArray(byte[] in)
    {
        short[] converted = new short[in.length];
        for (int idx = in.length - 1; idx >= 0; idx--)
        {
            converted[idx] = (short)(in[idx] & GF2Field.MASK);
        }
        return converted;
    }

    /**
     * Converts a byte matrix into a short matrix, element by element.
     *
     * @param in the matrix to be converted
     * @return the short matrix that corresponds to the input
     */
    public static short[][] convertArray(byte[][] in)
    {
        int rows = in.length;
        int cols = in[0].length;
        short[][] converted = new short[rows][cols];
        for (int r = 0; r < rows; r++)
        {
            for (int c = 0; c < cols; c++)
            {
                converted[r][c] = (short)(in[r][c] & GF2Field.MASK);
            }
        }
        return converted;
    }

    /**
     * Converts a 3-dimensional byte array into a 3-dimensional short array,
     * element by element.
     *
     * @param in the array to be converted
     * @return the short array that corresponds to the input
     */
    public static short[][][] convertArray(byte[][][] in)
    {
        int d1 = in.length;
        int d2 = in[0].length;
        int d3 = in[0][0].length;
        short[][][] converted = new short[d1][d2][d3];
        for (int i = 0; i < d1; i++)
        {
            for (int j = 0; j < d2; j++)
            {
                for (int k = 0; k < d3; k++)
                {
                    converted[i][j][k] = (short)(in[i][j][k] & GF2Field.MASK);
                }
            }
        }
        return converted;
    }

    /**
     * Narrows an int array into a byte array by casting each element.
     *
     * @param in the array to be converted
     * @return the byte array that corresponds to the input
     */
    public static byte[] convertIntArray(int[] in)
    {
        byte[] converted = new byte[in.length];
        for (int idx = in.length - 1; idx >= 0; idx--)
        {
            converted[idx] = (byte)in[idx];
        }
        return converted;
    }

    /**
     * Narrows a short array into a byte array by casting each element.
     *
     * @param in the array to be converted
     * @return the byte array that corresponds to the input
     */
    public static byte[] convertArray(short[] in)
    {
        int length = in.length;
        byte[] converted = new byte[length];
        int idx = 0;
        while (idx < length)
        {
            converted[idx] = (byte)in[idx];
            idx++;
        }
        return converted;
    }

    /**
     * Narrows a short matrix into a byte matrix by casting each element.
     *
     * @param in the matrix to be converted
     * @return the byte matrix that corresponds to the input
     */
    public static byte[][] convertArray(short[][] in)
    {
        int rows = in.length;
        int cols = in[0].length;
        byte[][] converted = new byte[rows][cols];
        for (int r = 0; r < rows; r++)
        {
            for (int c = 0; c < cols; c++)
            {
                converted[r][c] = (byte)in[r][c];
            }
        }
        return converted;
    }

    /**
     * Narrows a 3-dimensional short array into a 3-dimensional byte array by
     * casting each element.
     *
     * @param in the array to be converted
     * @return the byte array that corresponds to the input
     */
    public static byte[][][] convertArray(short[][][] in)
    {
        int d1 = in.length;
        int d2 = in[0].length;
        int d3 = in[0][0].length;
        byte[][][] converted = new byte[d1][d2][d3];
        for (int i = 0; i < d1; i++)
        {
            for (int j = 0; j < d2; j++)
            {
                for (int k = 0; k < d3; k++)
                {
                    converted[i][j][k] = (byte)in[i][j][k];
                }
            }
        }
        return converted;
    }

    /**
     * Compares two short arrays element-wise. No null checks are performed.
     *
     * @param left the first short array
     * @param right the second short array
     * @return true when both arrays have the same length and contents
     */
    public static boolean equals(short[] left, short[] right)
    {
        if (left.length != right.length)
        {
            return false;
        }
        // Full scan (no early exit), exactly as the original implementation.
        boolean same = true;
        int idx = left.length;
        while (--idx >= 0)
        {
            same &= left[idx] == right[idx];
        }
        return same;
    }

    /**
     * Compares two two-dimensional short arrays row by row. No null checks
     * are performed.
     *
     * @param left the first short array
     * @param right the second short array
     * @return true when both matrices have the same dimensions and contents
     */
    public static boolean equals(short[][] left, short[][] right)
    {
        if (left.length != right.length)
        {
            return false;
        }
        boolean same = true;
        int idx = left.length;
        while (--idx >= 0)
        {
            same &= equals(left[idx], right[idx]);
        }
        return same;
    }

    /**
     * Compares two three-dimensional short arrays layer by layer. No null
     * checks are performed.
     *
     * @param left the first short array
     * @param right the second short array
     * @return true when both arrays have the same dimensions and contents
     */
    public static boolean equals(short[][][] left, short[][][] right)
    {
        if (left.length != right.length)
        {
            return false;
        }
        boolean same = true;
        int idx = left.length;
        while (--idx >= 0)
        {
            same &= equals(left[idx], right[idx]);
        }
        return same;
    }
}
| |
/*
* Copyright 2014 Soichiro Kashima
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.ksoichiro.android.observablescrollview.samples;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.SimpleAdapter;
import android.widget.Spinner;
import java.text.Collator;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Launcher screen of the sample app: queries this package for activities
 * that declare the samples intent category, filters them by the selection
 * made in the toolbar spinner, and lists them; tapping a row launches the
 * corresponding sample activity.
 */
public class MainActivity extends AppCompatActivity implements AdapterView.OnItemClickListener {
    // Intent category shared by all sample activities (this package's name).
    private static final String CATEGORY_SAMPLES = MainActivity.class.getPackage().getName();
    private static final String TAG_CLASS_NAME = "className";
    private static final String TAG_DESCRIPTION = "description";
    private static final String TAG_INTENT = "intent";
    // Orders list entries alphabetically by their "className" value using
    // locale-aware collation.
    private static final Comparator<Map<String, Object>> DISPLAY_NAME_COMPARATOR = new Comparator<Map<String, Object>>() {
        private final Collator collator = Collator.getInstance();
        @Override
        public int compare(Map<String, Object> lhs, Map<String, Object> rhs) {
            return collator.compare(lhs.get("className"), rhs.get("className"));
        }
    };
    private ListView listView;
    // Quickly navigate through the examples.
    static enum Filter {
        All,
        GridView,
        RecyclerView,
        ScrollView,
        ListView,
        WebView,
        Toolbar,
        ActionBar,
        FlexibleSpace,
        Parallax,
        ViewPager,
    }
    // Currently selected spinner filter; All shows every sample.
    Filter currentFilter = Filter.All;
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        listView = (ListView) findViewById(android.R.id.list);
        listView.setOnItemClickListener(this);
        Spinner spinner = (Spinner) findViewById(R.id.spinner_toolbar);
        spinner.setAdapter(new FilterAdapter(this));
        spinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
            @Override
            public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
                // Spinner positions map 1:1 onto Filter ordinals.
                currentFilter = Filter.values()[position];
                refreshData();
            }
            @Override
            public void onNothingSelected(AdapterView<?> parent) {
            }
        });
    }
    /** Rebuilds the list adapter from the (re-filtered) sample data. */
    private void refreshData() {
        listView.setAdapter(new SimpleAdapter(this, getData(),
                R.layout.list_item_main,
                new String[]{TAG_CLASS_NAME, TAG_DESCRIPTION,},
                new int[]{R.id.className, R.id.description,}));
    }
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }
    @Override
    public boolean onOptionsItemSelected(final MenuItem menu) {
        int id = menu.getItemId();
        if (id == R.id.menu_about) {
            startActivity(new Intent(getApplicationContext(), AboutActivity.class));
            return true;
        }
        return false;
    }
    /**
     * Queries this package for sample activities, applies the current
     * filter, and returns one map per sample (class name, description,
     * launch intent) sorted by class name.
     */
    private List<Map<String, Object>> getData() {
        List<Map<String, Object>> data = new ArrayList<Map<String, Object>>();
        Intent mainIntent = new Intent(Intent.ACTION_MAIN, null);
        mainIntent.setPackage(getApplicationContext().getPackageName());
        mainIntent.addCategory(CATEGORY_SAMPLES);
        PackageManager pm = getPackageManager();
        List<ResolveInfo> list = pm.queryIntentActivities(mainIntent, 0);
        if (list == null) {
            return data;
        }
        for (ResolveInfo info : list) {
            CharSequence labelSeq = info.loadLabel(pm);
            String label = labelSeq != null
                    ? labelSeq.toString()
                    : info.activityInfo.name;
            String[] labelPath = label.split("/");
            String nextLabel = labelPath[0];
            if (labelPath.length == 1) {
                // NOTE(review): the `+ ""` appends nothing — presumably
                // `+ "."` (strip "package.") was intended; the regex below
                // compensates by removing everything up to the last dot.
                String nameLabel = info.activityInfo.name.replace(info.activityInfo.packageName + "", "");
                // Remove package and get simple class name
                if (nameLabel.contains(".")) {
                    nameLabel = nameLabel.replaceAll("[^.]*\\.", "");
                }
                // Filter logic.
                if (currentFilter == Filter.All || nameLabel.contains(currentFilter.name())) {
                    addItem(data,
                            nameLabel,
                            nextLabel,
                            activityIntent(
                                    info.activityInfo.applicationInfo.packageName,
                                    info.activityInfo.name));
                }
            }
        }
        Collections.sort(data, DISPLAY_NAME_COMPARATOR);
        return data;
    }
    /** Builds an explicit launch intent for the given activity class. */
    protected Intent activityIntent(String pkg, String componentName) {
        Intent result = new Intent();
        result.setClassName(pkg, componentName);
        return result;
    }
    /** Appends one sample row (name, description, launch intent) to data. */
    protected void addItem(List<Map<String, Object>> data, String className, String description,
            Intent intent) {
        Map<String, Object> temp = new HashMap<String, Object>();
        temp.put(TAG_CLASS_NAME, className);
        temp.put(TAG_DESCRIPTION, description);
        temp.put(TAG_INTENT, intent);
        data.add(temp);
    }
    @SuppressWarnings("unchecked")
    @Override
    public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
        // Launch the sample whose intent was stored for this row.
        Map<String, Object> map = (Map<String, Object>) parent.getItemAtPosition(position);
        Intent intent = (Intent) map.get(TAG_INTENT);
        startActivity(intent);
    }
    /** Spinner adapter exposing the Filter enum values. */
    private class FilterAdapter extends ArrayAdapter<Filter> {
        public FilterAdapter(Context context) {
            super(context, android.R.layout.simple_spinner_item, Filter.values());
            setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        }
    }
}
| |
/*
* Xero Payroll AU API
* This is the Xero Payroll API for orgs in Australia region.
*
* Contact: api@xero.com
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package com.xero.models.payrollau;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.xero.api.StringUtil;
import io.swagger.annotations.ApiModelProperty;
import java.util.Objects;
import java.util.UUID;
/** LeaveLine */
public class LeaveLine {
StringUtil util = new StringUtil();
@JsonProperty("LeaveTypeID")
private UUID leaveTypeID;
@JsonProperty("CalculationType")
private LeaveLineCalculationType calculationType;
@JsonProperty("EntitlementFinalPayPayoutType")
private EntitlementFinalPayPayoutType entitlementFinalPayPayoutType;
@JsonProperty("EmploymentTerminationPaymentType")
private EmploymentTerminationPaymentType employmentTerminationPaymentType;
@JsonProperty("IncludeSuperannuationGuaranteeContribution")
private Boolean includeSuperannuationGuaranteeContribution;
@JsonProperty("NumberOfUnits")
private Double numberOfUnits;
@JsonProperty("AnnualNumberOfUnits")
private Double annualNumberOfUnits;
@JsonProperty("FullTimeNumberOfUnitsPerPeriod")
private Double fullTimeNumberOfUnitsPerPeriod;
/**
* Xero leave type identifier
*
* @param leaveTypeID UUID
* @return LeaveLine
*/
public LeaveLine leaveTypeID(UUID leaveTypeID) {
this.leaveTypeID = leaveTypeID;
return this;
}
/**
* Xero leave type identifier
*
* @return leaveTypeID
*/
@ApiModelProperty(
example = "742998cb-7584-4ecf-aa88-d694f59c50f9",
value = "Xero leave type identifier")
/**
* Xero leave type identifier
*
* @return leaveTypeID UUID
*/
public UUID getLeaveTypeID() {
return leaveTypeID;
}
/**
* Xero leave type identifier
*
* @param leaveTypeID UUID
*/
public void setLeaveTypeID(UUID leaveTypeID) {
this.leaveTypeID = leaveTypeID;
}
/**
* calculationType
*
* @param calculationType LeaveLineCalculationType
* @return LeaveLine
*/
public LeaveLine calculationType(LeaveLineCalculationType calculationType) {
this.calculationType = calculationType;
return this;
}
/**
* Get calculationType
*
* @return calculationType
*/
@ApiModelProperty(value = "")
/**
* calculationType
*
* @return calculationType LeaveLineCalculationType
*/
public LeaveLineCalculationType getCalculationType() {
return calculationType;
}
/**
* calculationType
*
* @param calculationType LeaveLineCalculationType
*/
public void setCalculationType(LeaveLineCalculationType calculationType) {
this.calculationType = calculationType;
}
/**
* entitlementFinalPayPayoutType
*
* @param entitlementFinalPayPayoutType EntitlementFinalPayPayoutType
* @return LeaveLine
*/
public LeaveLine entitlementFinalPayPayoutType(
EntitlementFinalPayPayoutType entitlementFinalPayPayoutType) {
this.entitlementFinalPayPayoutType = entitlementFinalPayPayoutType;
return this;
}
/**
* Get entitlementFinalPayPayoutType
*
* @return entitlementFinalPayPayoutType
*/
@ApiModelProperty(value = "")
/**
* entitlementFinalPayPayoutType
*
* @return entitlementFinalPayPayoutType EntitlementFinalPayPayoutType
*/
public EntitlementFinalPayPayoutType getEntitlementFinalPayPayoutType() {
return entitlementFinalPayPayoutType;
}
/**
* entitlementFinalPayPayoutType
*
* @param entitlementFinalPayPayoutType EntitlementFinalPayPayoutType
*/
public void setEntitlementFinalPayPayoutType(
EntitlementFinalPayPayoutType entitlementFinalPayPayoutType) {
this.entitlementFinalPayPayoutType = entitlementFinalPayPayoutType;
}
/**
* employmentTerminationPaymentType
*
* @param employmentTerminationPaymentType EmploymentTerminationPaymentType
* @return LeaveLine
*/
public LeaveLine employmentTerminationPaymentType(
EmploymentTerminationPaymentType employmentTerminationPaymentType) {
this.employmentTerminationPaymentType = employmentTerminationPaymentType;
return this;
}
/**
* Get employmentTerminationPaymentType
*
* @return employmentTerminationPaymentType
*/
@ApiModelProperty(value = "")
/**
* employmentTerminationPaymentType
*
* @return employmentTerminationPaymentType EmploymentTerminationPaymentType
*/
public EmploymentTerminationPaymentType getEmploymentTerminationPaymentType() {
return employmentTerminationPaymentType;
}
/**
* employmentTerminationPaymentType
*
* @param employmentTerminationPaymentType EmploymentTerminationPaymentType
*/
public void setEmploymentTerminationPaymentType(
EmploymentTerminationPaymentType employmentTerminationPaymentType) {
this.employmentTerminationPaymentType = employmentTerminationPaymentType;
}
/**
* amount of leave line
*
* @param includeSuperannuationGuaranteeContribution Boolean
* @return LeaveLine
*/
public LeaveLine includeSuperannuationGuaranteeContribution(
Boolean includeSuperannuationGuaranteeContribution) {
this.includeSuperannuationGuaranteeContribution = includeSuperannuationGuaranteeContribution;
return this;
}
/**
* amount of leave line
*
* @return includeSuperannuationGuaranteeContribution
*/
@ApiModelProperty(example = "true", value = "amount of leave line")
/**
* amount of leave line
*
* @return includeSuperannuationGuaranteeContribution Boolean
*/
public Boolean getIncludeSuperannuationGuaranteeContribution() {
return includeSuperannuationGuaranteeContribution;
}
/**
* amount of leave line
*
* @param includeSuperannuationGuaranteeContribution Boolean
*/
public void setIncludeSuperannuationGuaranteeContribution(
Boolean includeSuperannuationGuaranteeContribution) {
this.includeSuperannuationGuaranteeContribution = includeSuperannuationGuaranteeContribution;
}
/**
* Number of units for leave line.
*
* @param numberOfUnits Double
* @return LeaveLine
*/
public LeaveLine numberOfUnits(Double numberOfUnits) {
this.numberOfUnits = numberOfUnits;
return this;
}
/**
* Number of units for leave line.
*
* @return numberOfUnits
*/
@ApiModelProperty(example = "2.5", value = "Number of units for leave line.")
/**
* Number of units for leave line.
*
* @return numberOfUnits Double
*/
public Double getNumberOfUnits() {
return numberOfUnits;
}
/**
* Number of units for leave line.
*
* @param numberOfUnits Double
*/
public void setNumberOfUnits(Double numberOfUnits) {
this.numberOfUnits = numberOfUnits;
}
/**
* Hours of leave accrued each year
*
* @param annualNumberOfUnits Double
* @return LeaveLine
*/
public LeaveLine annualNumberOfUnits(Double annualNumberOfUnits) {
this.annualNumberOfUnits = annualNumberOfUnits;
return this;
}
/**
* Hours of leave accrued each year
*
* @return annualNumberOfUnits
*/
@ApiModelProperty(example = "2.5", value = "Hours of leave accrued each year")
/**
* Hours of leave accrued each year
*
* @return annualNumberOfUnits Double
*/
public Double getAnnualNumberOfUnits() {
return annualNumberOfUnits;
}
/**
* Hours of leave accrued each year
*
* @param annualNumberOfUnits Double
*/
public void setAnnualNumberOfUnits(Double annualNumberOfUnits) {
this.annualNumberOfUnits = annualNumberOfUnits;
}
/**
* Normal ordinary earnings number of units for leave line.
*
* @param fullTimeNumberOfUnitsPerPeriod Double
* @return LeaveLine
*/
public LeaveLine fullTimeNumberOfUnitsPerPeriod(Double fullTimeNumberOfUnitsPerPeriod) {
this.fullTimeNumberOfUnitsPerPeriod = fullTimeNumberOfUnitsPerPeriod;
return this;
}
/**
* Normal ordinary earnings number of units for leave line.
*
* @return fullTimeNumberOfUnitsPerPeriod
*/
@ApiModelProperty(
example = "2.5",
value = "Normal ordinary earnings number of units for leave line.")
/**
* Normal ordinary earnings number of units for leave line.
*
* @return fullTimeNumberOfUnitsPerPeriod Double
*/
public Double getFullTimeNumberOfUnitsPerPeriod() {
return fullTimeNumberOfUnitsPerPeriod;
}
/**
* Normal ordinary earnings number of units for leave line.
*
* @param fullTimeNumberOfUnitsPerPeriod Double
*/
public void setFullTimeNumberOfUnitsPerPeriod(Double fullTimeNumberOfUnitsPerPeriod) {
this.fullTimeNumberOfUnitsPerPeriod = fullTimeNumberOfUnitsPerPeriod;
}
@Override
public boolean equals(java.lang.Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
LeaveLine leaveLine = (LeaveLine) o;
return Objects.equals(this.leaveTypeID, leaveLine.leaveTypeID)
&& Objects.equals(this.calculationType, leaveLine.calculationType)
&& Objects.equals(
this.entitlementFinalPayPayoutType, leaveLine.entitlementFinalPayPayoutType)
&& Objects.equals(
this.employmentTerminationPaymentType, leaveLine.employmentTerminationPaymentType)
&& Objects.equals(
this.includeSuperannuationGuaranteeContribution,
leaveLine.includeSuperannuationGuaranteeContribution)
&& Objects.equals(this.numberOfUnits, leaveLine.numberOfUnits)
&& Objects.equals(this.annualNumberOfUnits, leaveLine.annualNumberOfUnits)
&& Objects.equals(
this.fullTimeNumberOfUnitsPerPeriod, leaveLine.fullTimeNumberOfUnitsPerPeriod);
}
@Override
public int hashCode() {
return Objects.hash(
leaveTypeID,
calculationType,
entitlementFinalPayPayoutType,
employmentTerminationPaymentType,
includeSuperannuationGuaranteeContribution,
numberOfUnits,
annualNumberOfUnits,
fullTimeNumberOfUnitsPerPeriod);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class LeaveLine {\n");
sb.append(" leaveTypeID: ").append(toIndentedString(leaveTypeID)).append("\n");
sb.append(" calculationType: ").append(toIndentedString(calculationType)).append("\n");
sb.append(" entitlementFinalPayPayoutType: ")
.append(toIndentedString(entitlementFinalPayPayoutType))
.append("\n");
sb.append(" employmentTerminationPaymentType: ")
.append(toIndentedString(employmentTerminationPaymentType))
.append("\n");
sb.append(" includeSuperannuationGuaranteeContribution: ")
.append(toIndentedString(includeSuperannuationGuaranteeContribution))
.append("\n");
sb.append(" numberOfUnits: ").append(toIndentedString(numberOfUnits)).append("\n");
sb.append(" annualNumberOfUnits: ")
.append(toIndentedString(annualNumberOfUnits))
.append("\n");
sb.append(" fullTimeNumberOfUnitsPerPeriod: ")
.append(toIndentedString(fullTimeNumberOfUnitsPerPeriod))
.append("\n");
sb.append("}");
return sb.toString();
}
/**
* Convert the given object to string with each line indented by 4 spaces (except the first line).
*/
private String toIndentedString(java.lang.Object o) {
if (o == null) {
return "null";
}
return o.toString().replace("\n", "\n ");
}
}
| |
package de.danoeh.antennapod.fragment;
import android.app.Activity;
import android.content.Context;
import android.content.DialogInterface;
import android.content.SharedPreferences;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.os.Parcelable;
import android.support.v4.app.Fragment;
import android.support.v7.widget.SearchView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.widget.Toast;
import com.mobeta.android.dslv.DragSortListView;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import de.danoeh.antennapod.R;
import de.danoeh.antennapod.activity.MainActivity;
import de.danoeh.antennapod.adapter.DefaultActionButtonCallback;
import de.danoeh.antennapod.adapter.NewEpisodesListAdapter;
import de.danoeh.antennapod.core.asynctask.DownloadObserver;
import de.danoeh.antennapod.core.dialog.ConfirmationDialog;
import de.danoeh.antennapod.core.feed.EventDistributor;
import de.danoeh.antennapod.core.feed.Feed;
import de.danoeh.antennapod.core.feed.FeedItem;
import de.danoeh.antennapod.core.feed.FeedMedia;
import de.danoeh.antennapod.core.preferences.UserPreferences;
import de.danoeh.antennapod.core.service.download.DownloadService;
import de.danoeh.antennapod.core.service.download.Downloader;
import de.danoeh.antennapod.core.storage.DBReader;
import de.danoeh.antennapod.core.storage.DBTasks;
import de.danoeh.antennapod.core.storage.DBWriter;
import de.danoeh.antennapod.core.storage.DownloadRequester;
import de.danoeh.antennapod.core.util.QueueAccess;
import de.danoeh.antennapod.core.util.gui.FeedItemUndoToken;
import de.danoeh.antennapod.core.util.gui.UndoBarController;
import de.danoeh.antennapod.menuhandler.MenuItemUtils;
import de.danoeh.antennapod.menuhandler.NavDrawerActivity;
/**
* Shows unread or recently published episodes
*/
public class NewEpisodesFragment extends Fragment {
    private static final String TAG = "NewEpisodesFragment";
    // EventDistributor bitmask: receiving any of these events re-triggers the item loader
    // (see contentUpdate below).
    private static final int EVENTS = EventDistributor.DOWNLOAD_HANDLED |
            EventDistributor.DOWNLOAD_QUEUED |
            EventDistributor.QUEUE_UPDATE |
            EventDistributor.UNREAD_ITEMS_UPDATE |
            EventDistributor.PLAYER_STATUS_UPDATE;
    // Upper bound on how many recently published episodes are read from the database.
    private static final int RECENT_EPISODES_LIMIT = 150;
    // SharedPreferences file holding the episode filter flag and the saved scroll position.
    private static final String PREF_NAME = "PrefNewEpisodesFragment";
    private static final String PREF_EPISODE_FILTER_BOOL = "newEpisodeFilterEnabled";
    private static final String PREF_KEY_LIST_TOP = "list_top";
    private static final String PREF_KEY_LIST_SELECTION = "list_selection";
    private DragSortListView listView;
    private NewEpisodesListAdapter listAdapter;
    private TextView txtvEmpty;
    private ProgressBar progLoading;
    private UndoBarController undoBarController;
    // Data produced by ItemLoader; only valid once itemsLoaded is true.
    private List<FeedItem> unreadItems;
    private List<FeedItem> recentItems;
    private QueueAccess queueAccess;
    // Active downloads, pushed in by downloadObserverCallback; may be null before first callback.
    private List<Downloader> downloaderList;
    private boolean itemsLoaded = false;
    private boolean viewsCreated = false;
    // true: show only unread items; false: show the recent-episodes list.
    private boolean showOnlyNewEpisodes = false;
    // Hosting MainActivity; set in onAttach/onStart, cleared in resetViewState().
    private AtomicReference<MainActivity> activity = new AtomicReference<MainActivity>();
    private DownloadObserver downloadObserver = null;
    // Snapshot of the refresh state used to decide when the options menu must be invalidated.
    private boolean isUpdatingFeeds;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Instance (and loaded lists) survive configuration changes.
        setRetainInstance(true);
        setHasOptionsMenu(true);
        // Read the persisted episode-filter flag before any view is created.
        updateShowOnlyEpisodes();
    }

    @Override
    public void onResume() {
        super.onResume();
        // Always reload on resume; data may have changed while paused.
        startItemLoader();
    }

    @Override
    public void onStart() {
        super.onStart();
        EventDistributor.getInstance().register(contentUpdate);
        this.activity.set((MainActivity) getActivity());
        // Re-bind the retained download observer to the (possibly new) activity.
        if (downloadObserver != null) {
            downloadObserver.setActivity(getActivity());
            downloadObserver.onResume();
        }
        // If data and views already exist (retained instance), render immediately.
        if (viewsCreated && itemsLoaded) {
            onFragmentLoaded();
        }
    }

    @Override
    public void onPause() {
        super.onPause();
        saveScrollPosition();
    }

    @Override
    public void onStop() {
        super.onStop();
        EventDistributor.getInstance().unregister(contentUpdate);
        stopItemLoader();
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        this.activity.set((MainActivity) getActivity());
    }

    @Override
    public void onDestroyView() {
        super.onDestroyView();
        // Views are gone; drop all view-bound state so it can be rebuilt in onCreateView.
        resetViewState();
    }

    /** Persists the first visible list position and its pixel offset for restoreScrollPosition(). */
    private void saveScrollPosition() {
        SharedPreferences prefs = getActivity().getSharedPreferences(PREF_NAME, Context.MODE_PRIVATE);
        SharedPreferences.Editor editor = prefs.edit();
        View v = listView.getChildAt(0);
        int top = (v == null) ? 0 : (v.getTop() - listView.getPaddingTop());
        editor.putInt(PREF_KEY_LIST_SELECTION, listView.getFirstVisiblePosition());
        editor.putInt(PREF_KEY_LIST_TOP, top);
        // NOTE(review): commit() writes synchronously on the calling thread; apply() would be
        // the asynchronous alternative — confirm before changing.
        editor.commit();
    }

    /** Restores the scroll position saved by saveScrollPosition(), then clears it (restore once). */
    private void restoreScrollPosition() {
        SharedPreferences prefs = getActivity().getSharedPreferences(PREF_NAME, Context.MODE_PRIVATE);
        int listSelection = prefs.getInt(PREF_KEY_LIST_SELECTION, 0);
        int top = prefs.getInt(PREF_KEY_LIST_TOP, 0);
        if(listSelection > 0 || top > 0) {
            listView.setSelectionFromTop(listSelection, top);
            // restore once, then forget
            SharedPreferences.Editor editor = prefs.edit();
            editor.putInt(PREF_KEY_LIST_SELECTION, 0);
            editor.putInt(PREF_KEY_LIST_TOP, 0);
            editor.commit();
        }
    }

    /** Clears all view-bound state; called from onDestroyView. */
    private void resetViewState() {
        listAdapter = null;
        activity.set(null);
        viewsCreated = false;
        undoBarController = null;
        if (downloadObserver != null) {
            downloadObserver.onPause();
        }
    }

    // Reports whether a feed refresh is currently running; used to toggle the refresh menu item.
    private final MenuItemUtils.UpdateRefreshMenuItemChecker updateRefreshMenuItemChecker = new MenuItemUtils.UpdateRefreshMenuItemChecker() {
        @Override
        public boolean isRefreshing() {
            return DownloadService.isRunning && DownloadRequester.getInstance().isDownloadingFeeds();
        }
    };

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        super.onCreateOptionsMenu(menu, inflater);
        // Menu is only shown once data is loaded and the nav drawer is closed.
        if (itemsLoaded && !MenuItemUtils.isActivityDrawerOpen((NavDrawerActivity) getActivity())) {
            inflater.inflate(R.menu.new_episodes, menu);
            final SearchView sv = new SearchView(getActivity());
            MenuItemUtils.addSearchItem(menu, sv);
            sv.setQueryHint(getString(R.string.search_hint));
            sv.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
                @Override
                public boolean onQueryTextSubmit(String s) {
                    // Submitting a query opens the search results as a child fragment.
                    sv.clearFocus();
                    ((MainActivity) getActivity()).loadChildFragment(SearchFragment.newInstance(s));
                    return true;
                }

                @Override
                public boolean onQueryTextChange(String s) {
                    return false;
                }
            });
            isUpdatingFeeds = MenuItemUtils.updateRefreshMenuItem(menu, R.id.refresh_item, updateRefreshMenuItemChecker);
        }
    }

    @Override
    public void onPrepareOptionsMenu(Menu menu) {
        super.onPrepareOptionsMenu(menu);
        if (itemsLoaded && !MenuItemUtils.isActivityDrawerOpen((NavDrawerActivity) getActivity())) {
            // "Mark all read" only makes sense when there is something unread.
            menu.findItem(R.id.mark_all_read_item).setVisible(unreadItems != null && !unreadItems.isEmpty());
            menu.findItem(R.id.episode_filter_item).setChecked(showOnlyNewEpisodes);
        }
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        if (!super.onOptionsItemSelected(item)) {
            switch (item.getItemId()) {
                case R.id.refresh_item:
                    // Refresh every subscribed feed.
                    List<Feed> feeds = ((MainActivity) getActivity()).getFeeds();
                    if (feeds != null) {
                        DBTasks.refreshAllFeeds(getActivity(), feeds);
                    }
                    return true;
                case R.id.mark_all_read_item:
                    // Destructive action: ask for confirmation first.
                    ConfirmationDialog conDialog = new ConfirmationDialog(getActivity(),
                            R.string.mark_all_read_label,
                            R.string.mark_all_read_confirmation_msg) {
                        @Override
                        public void onConfirmButtonPressed(
                                DialogInterface dialog) {
                            dialog.dismiss();
                            DBWriter.markAllItemsRead(getActivity());
                            Toast.makeText(getActivity(), R.string.mark_all_read_msg, Toast.LENGTH_SHORT).show();
                        }
                    };
                    conDialog.createNewDialog().show();
                    return true;
                case R.id.episode_filter_item:
                    // Toggle the "only new episodes" filter and persist it.
                    boolean newVal = !item.isChecked();
                    setShowOnlyNewEpisodes(newVal);
                    item.setChecked(newVal);
                    return true;
                default:
                    return false;
            }
        } else {
            return true;
        }
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        super.onCreateView(inflater, container, savedInstanceState);
        ((MainActivity) getActivity()).getSupportActionBar().setTitle(R.string.new_episodes_label);
        View root = inflater.inflate(R.layout.new_episodes_fragment, container, false);
        listView = (DragSortListView) root.findViewById(android.R.id.list);
        txtvEmpty = (TextView) root.findViewById(android.R.id.empty);
        progLoading = (ProgressBar) root.findViewById(R.id.progLoading);
        listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                // Adapter positions are offset by any header views the list may have.
                FeedItem item = (FeedItem) listAdapter.getItem(position - listView.getHeaderViewsCount());
                if (item != null) {
                    ((MainActivity) getActivity()).loadChildFragment(ItemFragment.newInstance(item.getId()));
                }
            }
        });
        listView.setRemoveListener(new DragSortListView.RemoveListener() {
            @Override
            public void remove(int which) {
                // Swipe-to-remove marks the item read and offers an undo bar.
                Log.d(TAG, "remove("+which+")");
                stopItemLoader();
                FeedItem item = (FeedItem) listView.getAdapter().getItem(which);
                DBWriter.markItemRead(getActivity(), item.getId(), true);
                undoBarController.showUndoBar(false,
                        getString(R.string.marked_as_read_label), new FeedItemUndoToken(item,
                                which)
                );
            }
        });
        undoBarController = new UndoBarController(root.findViewById(R.id.undobar), new UndoBarController.UndoListener() {
            @Override
            public void onUndo(Parcelable token) {
                // Perform the undo
                // NOTE(review): token is cast before the null check (harmless for a Java cast),
                // and 'position' is computed but never used — confirm before cleaning up.
                FeedItemUndoToken undoToken = (FeedItemUndoToken) token;
                if (token != null) {
                    long itemId = undoToken.getFeedItemId();
                    int position = undoToken.getPosition();
                    DBWriter.markItemRead(getActivity(), itemId, false);
                }
            }
        });
        // NOTE(review): secondColor is computed but never used in this method — apparent dead
        // local (possibly a leftover from a removed SwipeRefreshLayout setup); confirm before removal.
        final int secondColor = (UserPreferences.getTheme() == R.style.Theme_AntennaPod_Dark) ? R.color.swipe_refresh_secondary_color_dark : R.color.swipe_refresh_secondary_color_light;
        if (!itemsLoaded) {
            progLoading.setVisibility(View.VISIBLE);
            txtvEmpty.setVisibility(View.GONE);
        }
        viewsCreated = true;
        if (itemsLoaded && activity.get() != null) {
            onFragmentLoaded();
        }
        return root;
    }

    /**
     * Wires data into the views. Requires itemsLoaded && viewsCreated && a non-null activity;
     * creates the adapter and download observer lazily on first call.
     */
    private void onFragmentLoaded() {
        if (listAdapter == null) {
            listAdapter = new NewEpisodesListAdapter(activity.get(), itemAccess, new DefaultActionButtonCallback(activity.get()));
            listView.setAdapter(listAdapter);
            listView.setEmptyView(txtvEmpty);
            downloadObserver = new DownloadObserver(activity.get(), new Handler(), downloadObserverCallback);
            downloadObserver.onResume();
        }
        listAdapter.notifyDataSetChanged();
        restoreScrollPosition();
        getActivity().supportInvalidateOptionsMenu();
        updateShowOnlyEpisodesListViewState();
    }

    // Redraws list rows when download progress/state changes.
    private DownloadObserver.Callback downloadObserverCallback = new DownloadObserver.Callback() {
        @Override
        public void onContentChanged() {
            if (listAdapter != null) {
                listAdapter.notifyDataSetChanged();
            }
        }

        @Override
        public void onDownloadDataAvailable(List<Downloader> downloaderList) {
            NewEpisodesFragment.this.downloaderList = downloaderList;
            if (listAdapter != null) {
                listAdapter.notifyDataSetChanged();
            }
        }
    };

    // Adapter data source: serves either unreadItems or recentItems depending on the filter flag.
    private NewEpisodesListAdapter.ItemAccess itemAccess = new NewEpisodesListAdapter.ItemAccess() {
        @Override
        public int getCount() {
            if (itemsLoaded) {
                return (showOnlyNewEpisodes) ? unreadItems.size() : recentItems.size();
            }
            return 0;
        }

        @Override
        public FeedItem getItem(int position) {
            if (itemsLoaded) {
                return (showOnlyNewEpisodes) ? unreadItems.get(position) : recentItems.get(position);
            }
            return null;
        }

        @Override
        public int getItemDownloadProgressPercent(FeedItem item) {
            // Linear scan over active downloads for the one matching this item's media file.
            if (downloaderList != null) {
                for (Downloader downloader : downloaderList) {
                    if (downloader.getDownloadRequest().getFeedfileType() == FeedMedia.FEEDFILETYPE_FEEDMEDIA
                            && downloader.getDownloadRequest().getFeedfileId() == item.getMedia().getId()) {
                        return downloader.getDownloadRequest().getProgressPercent();
                    }
                }
            }
            return 0;
        }

        @Override
        public boolean isInQueue(FeedItem item) {
            if (itemsLoaded) {
                return queueAccess.contains(item.getId());
            } else {
                return false;
            }
        }
    };

    // Reloads data whenever one of the EVENTS bits fires; also refreshes the menu when the
    // feed-refresh state flips.
    private EventDistributor.EventListener contentUpdate = new EventDistributor.EventListener() {
        @Override
        public void update(EventDistributor eventDistributor, Integer arg) {
            if ((arg & EVENTS) != 0) {
                startItemLoader();
                if (isUpdatingFeeds != updateRefreshMenuItemChecker.isRefreshing()) {
                    getActivity().supportInvalidateOptionsMenu();
                }
            }
        }
    };

    /** Reads the persisted episode-filter flag (defaults to true). */
    private void updateShowOnlyEpisodes() {
        SharedPreferences prefs = getActivity().getSharedPreferences(PREF_NAME, Context.MODE_PRIVATE);
        showOnlyNewEpisodes = prefs.getBoolean(PREF_EPISODE_FILTER_BOOL, true);
    }

    /** Sets and persists the episode-filter flag, then refreshes list/menu state if views exist. */
    private void setShowOnlyNewEpisodes(boolean newVal) {
        showOnlyNewEpisodes = newVal;
        SharedPreferences prefs = getActivity().getSharedPreferences(PREF_NAME, Context.MODE_PRIVATE);
        SharedPreferences.Editor editor = prefs.edit();
        editor.putBoolean(PREF_EPISODE_FILTER_BOOL, showOnlyNewEpisodes);
        editor.commit();
        if (itemsLoaded && viewsCreated) {
            listAdapter.notifyDataSetChanged();
            activity.get().supportInvalidateOptionsMenu();
            updateShowOnlyEpisodesListViewState();
        }
    }

    /** With the filter on, the stock empty view is suppressed and hidden manually. */
    private void updateShowOnlyEpisodesListViewState() {
        if (showOnlyNewEpisodes) {
            listView.setEmptyView(null);
            txtvEmpty.setVisibility(View.GONE);
        } else {
            listView.setEmptyView(txtvEmpty);
        }
    }

    // Currently running loader, if any; replaced on every startItemLoader() call.
    private ItemLoader itemLoader;

    /** Cancels any running loader and starts a fresh one. */
    private void startItemLoader() {
        if (itemLoader != null) {
            itemLoader.cancel(true);
        }
        itemLoader = new ItemLoader();
        itemLoader.execute();
    }

    private void stopItemLoader() {
        if (itemLoader != null) {
            itemLoader.cancel(true);
        }
    }

    /**
     * Background loader that reads unread items, recent episodes, and the queue ID list from the
     * database, then publishes them to the fragment fields on the UI thread.
     */
    private class ItemLoader extends AsyncTask<Void, Void, Object[]> {
        @Override
        protected void onPreExecute() {
            super.onPreExecute();
            // Show the spinner only for the initial load.
            if (viewsCreated && !itemsLoaded) {
                listView.setVisibility(View.GONE);
                txtvEmpty.setVisibility(View.GONE);
                progLoading.setVisibility(View.VISIBLE);
            }
        }

        @Override
        protected Object[] doInBackground(Void... params) {
            // Skip DB access when the fragment is detached (activity reference cleared).
            Context context = activity.get();
            if (context != null) {
                return new Object[]{DBReader.getUnreadItemsList(context),
                        DBReader.getRecentlyPublishedEpisodes(context, RECENT_EPISODES_LIMIT),
                        QueueAccess.IDListAccess(DBReader.getQueueIDList(context))};
            } else {
                return null;
            }
        }

        @Override
        protected void onPostExecute(Object[] lists) {
            super.onPostExecute(lists);
            listView.setVisibility(View.VISIBLE);
            progLoading.setVisibility(View.GONE);
            if (lists != null) {
                // Unchecked casts mirror the Object[] packing in doInBackground (indices 0..2).
                unreadItems = (List<FeedItem>) lists[0];
                recentItems = (List<FeedItem>) lists[1];
                queueAccess = (QueueAccess) lists[2];
                itemsLoaded = true;
                if (viewsCreated && activity.get() != null) {
                    onFragmentLoaded();
                }
            }
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.