gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.connect.runtime;

import org.apache.kafka.connect.runtime.rest.entities.ConfigInfos;
import org.apache.kafka.connect.runtime.rest.entities.ConnectorInfo;
import org.apache.kafka.connect.runtime.rest.entities.ConnectorStateInfo;
import org.apache.kafka.connect.runtime.rest.entities.TaskInfo;
import org.apache.kafka.connect.util.Callback;
import org.apache.kafka.connect.util.ConnectorTaskId;

import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * <p>
 * The herder interface tracks and manages workers and connectors. It is the main interface for external components
 * to make changes to the state of the cluster. For example, in distributed mode, an implementation of this class
 * knows how to accept a connector configuration, may need to route it to the current leader worker for the cluster so
 * the config can be written to persistent storage, and then ensures the new connector is correctly instantiated on one
 * of the workers.
 * </p>
 * <p>
 * This class must implement all the actions that can be taken on the cluster (add/remove connectors, pause/resume tasks,
 * get state of connectors and tasks, etc). The non-Java interfaces to the cluster (REST API and CLI) are very simple
 * wrappers of the functionality provided by this interface.
 * </p>
 * <p>
 * In standalone mode, this implementation of this class will be trivial because no coordination is needed. In that case,
 * the implementation will mainly be delegating tasks directly to other components. For example, when creating a new
 * connector in standalone mode, there is no need to persist the config and the connector and its tasks must run in the
 * same process, so the standalone herder implementation can immediately instantiate and start the connector and its
 * tasks.
 * </p>
 */
public interface Herder {

    void start();

    void stop();

    /**
     * Get a list of connectors currently running in this cluster. This is a full list of connectors in the cluster
     * gathered from the current configuration. However, note that the list may be momentarily stale while
     * configuration changes are still propagating to this worker.
     *
     * @param callback callback to invoke with the collection of connector names
     * @throws org.apache.kafka.connect.runtime.distributed.RequestTargetException if this node can not resolve the request
     *         (e.g., because it has not joined the cluster or does not have configs in sync with the group) and it is
     *         not the leader or the task owner (e.g., task restart must be handled by the worker which owns the task)
     * @throws org.apache.kafka.connect.errors.ConnectException if this node is the leader, but still cannot resolve the
     *         request (e.g., it is not in sync with other worker's config state)
     */
    void connectors(Callback<Collection<String>> callback);

    /**
     * Get the definition and status of a connector.
     *
     * @param connName name of the connector
     * @param callback callback to invoke with the connector info
     */
    void connectorInfo(String connName, Callback<ConnectorInfo> callback);

    /**
     * Get the configuration for a connector.
     *
     * @param connName name of the connector
     * @param callback callback to invoke with the configuration
     */
    void connectorConfig(String connName, Callback<Map<String, String>> callback);

    /**
     * Set the configuration for a connector. This supports creation and updating.
     *
     * @param connName name of the connector
     * @param config the connector's configuration, or null if deleting the connector
     * @param allowReplace if true, allow overwriting previous configs; if false, throw AlreadyExistsException if a connector
     *        with the same name already exists
     * @param callback callback to invoke when the configuration has been written
     */
    void putConnectorConfig(String connName, Map<String, String> config, boolean allowReplace,
                            Callback<Created<ConnectorInfo>> callback);

    /**
     * Delete a connector and its configuration.
     *
     * @param connName name of the connector
     * @param callback callback to invoke when the configuration has been written
     */
    void deleteConnectorConfig(String connName, Callback<Created<ConnectorInfo>> callback);

    /**
     * Requests reconfiguration of the task. This should only be triggered by
     * {@link HerderConnectorContext}.
     *
     * @param connName name of the connector that should be reconfigured
     */
    void requestTaskReconfiguration(String connName);

    /**
     * Get the configurations for the current set of tasks of a connector.
     *
     * @param connName connector to update
     * @param callback callback to invoke upon completion
     */
    void taskConfigs(String connName, Callback<List<TaskInfo>> callback);

    /**
     * Set the configurations for the tasks of a connector. This should always include all tasks in the connector; if
     * there are existing configurations and fewer are provided, this will reduce the number of tasks, and if more are
     * provided it will increase the number of tasks.
     *
     * @param connName connector to update
     * @param configs list of configurations
     * @param callback callback to invoke upon completion
     */
    void putTaskConfigs(String connName, List<Map<String, String>> configs, Callback<Void> callback);

    /**
     * Look up the current status of a connector.
     *
     * @param connName name of the connector
     */
    ConnectorStateInfo connectorStatus(String connName);

    /**
     * Look up the status of a task.
     *
     * @param id id of the task
     */
    ConnectorStateInfo.TaskState taskStatus(ConnectorTaskId id);

    /**
     * Validate the provided connector config values against the configuration definition.
     *
     * @param connectorConfig the provided connector config values
     */
    ConfigInfos validateConnectorConfig(Map<String, String> connectorConfig);

    /**
     * Restart the task with the given id.
     *
     * @param id id of the task
     * @param cb callback to invoke upon completion
     */
    void restartTask(ConnectorTaskId id, Callback<Void> cb);

    /**
     * Restart the connector.
     *
     * @param connName name of the connector
     * @param cb callback to invoke upon completion
     */
    void restartConnector(String connName, Callback<Void> cb);

    /**
     * Pause the connector. This call will asynchronously suspend processing by the connector and all
     * of its tasks.
     *
     * @param connector name of the connector
     */
    void pauseConnector(String connector);

    /**
     * Resume the connector. This call will asynchronously start the connector and its tasks (if
     * not started already).
     *
     * @param connector name of the connector
     */
    void resumeConnector(String connector);

    /**
     * Result wrapper reporting whether a write created a new entity ({@code created == true})
     * or updated an existing one, together with the resulting value.
     *
     * @param <T> type of the wrapped result
     */
    class Created<T> {
        private final boolean created;
        private final T result;

        public Created(boolean created, T result) {
            this.created = created;
            this.result = result;
        }

        public boolean created() {
            return created;
        }

        public T result() {
            return result;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Created<?> created1 = (Created<?>) o;
            // Compare the primitive directly instead of autoboxing through Objects.equals.
            return created == created1.created && Objects.equals(result, created1.result);
        }

        @Override
        public int hashCode() {
            return Objects.hash(created, result);
        }
    }
}
/* ===========================================================
 * JFreeChart : a free chart library for the Java(tm) platform
 * ===========================================================
 *
 * (C) Copyright 2000-2007, by Object Refinery Limited and Contributors.
 *
 * Project Info:  http://www.jfree.org/jfreechart/index.html
 *
 * This library is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation; either version 2.1 of the License, or
 * (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public
 * License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
 * USA.
 *
 * [Java is a trademark or registered trademark of Sun Microsystems, Inc.
 * in the United States and other countries.]
 *
 * ---------------------
 * DefaultXYDataset.java
 * ---------------------
 * (C) Copyright 2006, 2007, by Object Refinery Limited and Contributors.
 *
 * Original Author:  David Gilbert (for Object Refinery Limited);
 * Contributor(s):   -;
 *
 * Changes
 * -------
 * 06-Jul-2006 : Version 1 (DG);
 * 02-Nov-2006 : Fixed a problem with adding a new series with the same key
 *               as an existing series (see bug 1589392) (DG);
 * 25-Jan-2007 : Implemented PublicCloneable (DG);
 *
 */

package org.jfree.data.xy;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.jfree.data.DomainOrder;
import org.jfree.data.general.DatasetChangeEvent;
import org.jfree.util.PublicCloneable;

/**
 * A default implementation of the {@link XYDataset} interface that stores
 * data values in arrays of double primitives.
 *
 * @since 1.0.2
 */
public class DefaultXYDataset extends AbstractXYDataset
        implements XYDataset, PublicCloneable {

    /**
     * Storage for the series keys.  This list must be kept in sync with the
     * seriesList.
     */
    private List seriesKeys;

    /**
     * Storage for the series in the dataset.  We use a list because the
     * order of the series is significant.  This list must be kept in sync
     * with the seriesKeys list.
     */
    private List seriesList;

    /**
     * Creates a new <code>DefaultXYDataset</code> instance, initially
     * containing no data.
     */
    public DefaultXYDataset() {
        // Use the imported ArrayList consistently (the original mixed
        // fully-qualified java.util.ArrayList with the import).
        this.seriesKeys = new ArrayList();
        this.seriesList = new ArrayList();
    }

    /**
     * Returns the number of series in the dataset.
     *
     * @return The series count.
     */
    public int getSeriesCount() {
        return this.seriesList.size();
    }

    /**
     * Returns the key for a series.
     *
     * @param series  the series index (in the range <code>0</code> to
     *     <code>getSeriesCount() - 1</code>).
     *
     * @return The key for the series.
     *
     * @throws IllegalArgumentException if <code>series</code> is not in the
     *     specified range.
     */
    public Comparable getSeriesKey(int series) {
        if ((series < 0) || (series >= getSeriesCount())) {
            throw new IllegalArgumentException("Series index out of bounds");
        }
        return (Comparable) this.seriesKeys.get(series);
    }

    /**
     * Returns the index of the series with the specified key, or -1 if there
     * is no such series in the dataset.
     *
     * @param seriesKey  the series key (<code>null</code> permitted).
     *
     * @return The index, or -1.
     */
    public int indexOf(Comparable seriesKey) {
        return this.seriesKeys.indexOf(seriesKey);
    }

    /**
     * Returns the order of the domain (x-) values in the dataset.  In this
     * implementation, we cannot guarantee that the x-values are ordered, so
     * this method returns <code>DomainOrder.NONE</code>.
     *
     * @return <code>DomainOrder.NONE</code>.
     */
    public DomainOrder getDomainOrder() {
        return DomainOrder.NONE;
    }

    /**
     * Returns the number of items in the specified series.
     *
     * @param series  the series index (in the range <code>0</code> to
     *     <code>getSeriesCount() - 1</code>).
     *
     * @return The item count.
     *
     * @throws IllegalArgumentException if <code>series</code> is not in the
     *     specified range.
     */
    public int getItemCount(int series) {
        if ((series < 0) || (series >= getSeriesCount())) {
            throw new IllegalArgumentException("Series index out of bounds");
        }
        double[][] seriesArray = (double[][]) this.seriesList.get(series);
        // Index 0 holds the x-values; x and y arrays are equal length
        // (enforced in addSeries), so either length is the item count.
        return seriesArray[0].length;
    }

    /**
     * Returns the x-value for an item within a series.
     *
     * @param series  the series index (in the range <code>0</code> to
     *     <code>getSeriesCount() - 1</code>).
     * @param item  the item index (in the range <code>0</code> to
     *     <code>getItemCount(series)</code>).
     *
     * @return The x-value.
     *
     * @throws ArrayIndexOutOfBoundsException if <code>series</code> is not
     *     within the specified range.
     * @throws ArrayIndexOutOfBoundsException if <code>item</code> is not
     *     within the specified range.
     *
     * @see #getX(int, int)
     */
    public double getXValue(int series, int item) {
        double[][] seriesData = (double[][]) this.seriesList.get(series);
        return seriesData[0][item];
    }

    /**
     * Returns the x-value for an item within a series.
     *
     * @param series  the series index (in the range <code>0</code> to
     *     <code>getSeriesCount() - 1</code>).
     * @param item  the item index (in the range <code>0</code> to
     *     <code>getItemCount(series)</code>).
     *
     * @return The x-value.
     *
     * @throws ArrayIndexOutOfBoundsException if <code>series</code> is not
     *     within the specified range.
     * @throws ArrayIndexOutOfBoundsException if <code>item</code> is not
     *     within the specified range.
     *
     * @see #getXValue(int, int)
     */
    public Number getX(int series, int item) {
        return new Double(getXValue(series, item));
    }

    /**
     * Returns the y-value for an item within a series.
     *
     * @param series  the series index (in the range <code>0</code> to
     *     <code>getSeriesCount() - 1</code>).
     * @param item  the item index (in the range <code>0</code> to
     *     <code>getItemCount(series)</code>).
     *
     * @return The y-value.
     *
     * @throws ArrayIndexOutOfBoundsException if <code>series</code> is not
     *     within the specified range.
     * @throws ArrayIndexOutOfBoundsException if <code>item</code> is not
     *     within the specified range.
     *
     * @see #getY(int, int)
     */
    public double getYValue(int series, int item) {
        double[][] seriesData = (double[][]) this.seriesList.get(series);
        // Index 1 holds the y-values.
        return seriesData[1][item];
    }

    /**
     * Returns the y-value for an item within a series.
     *
     * @param series  the series index (in the range <code>0</code> to
     *     <code>getSeriesCount() - 1</code>).
     * @param item  the item index (in the range <code>0</code> to
     *     <code>getItemCount(series)</code>).
     *
     * @return The y-value.
     *
     * @throws ArrayIndexOutOfBoundsException if <code>series</code> is not
     *     within the specified range.
     * @throws ArrayIndexOutOfBoundsException if <code>item</code> is not
     *     within the specified range.
     *
     * @see #getX(int, int)
     */
    public Number getY(int series, int item) {
        return new Double(getYValue(series, item));
    }

    /**
     * Adds a series or if a series with the same key already exists replaces
     * the data for that series, then sends a {@link DatasetChangeEvent} to
     * all registered listeners.
     *
     * @param seriesKey  the series key (<code>null</code> not permitted).
     * @param data  the data (must be an array with length 2, containing two
     *     arrays of equal length, the first containing the x-values and the
     *     second containing the y-values).
     */
    public void addSeries(Comparable seriesKey, double[][] data) {
        if (seriesKey == null) {
            throw new IllegalArgumentException(
                    "The 'seriesKey' cannot be null.");
        }
        if (data == null) {
            throw new IllegalArgumentException("The 'data' is null.");
        }
        if (data.length != 2) {
            throw new IllegalArgumentException(
                    "The 'data' array must have length == 2.");
        }
        if (data[0].length != data[1].length) {
            throw new IllegalArgumentException(
                    "The 'data' array must contain two arrays with equal length.");
        }
        int seriesIndex = indexOf(seriesKey);
        if (seriesIndex == -1) {  // add a new series
            this.seriesKeys.add(seriesKey);
            this.seriesList.add(data);
        }
        else {  // replace an existing series in place (the original used
                // remove() + add(), which List.set() does in one step)
            this.seriesList.set(seriesIndex, data);
        }
        notifyListeners(new DatasetChangeEvent(this, this));
    }

    /**
     * Removes a series from the dataset, then sends a
     * {@link DatasetChangeEvent} to all registered listeners.
     *
     * @param seriesKey  the series key (<code>null</code> not permitted).
     */
    public void removeSeries(Comparable seriesKey) {
        int seriesIndex = indexOf(seriesKey);
        if (seriesIndex >= 0) {
            this.seriesKeys.remove(seriesIndex);
            this.seriesList.remove(seriesIndex);
            notifyListeners(new DatasetChangeEvent(this, this));
        }
    }

    /**
     * Tests this <code>DefaultXYDataset</code> instance for equality with an
     * arbitrary object.  This method returns <code>true</code> if and only if:
     * <ul>
     * <li><code>obj</code> is not <code>null</code>;</li>
     * <li><code>obj</code> is an instance of
     *     <code>DefaultXYDataset</code>;</li>
     * <li>both datasets have the same number of series, each containing
     *     exactly the same values.</li>
     * </ul>
     *
     * @param obj  the object (<code>null</code> permitted).
     *
     * @return A boolean.
     */
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (!(obj instanceof DefaultXYDataset)) {
            return false;
        }
        DefaultXYDataset that = (DefaultXYDataset) obj;
        // Equal key lists imply equal series counts, because seriesList is
        // kept in sync with seriesKeys by the class invariant.
        if (!this.seriesKeys.equals(that.seriesKeys)) {
            return false;
        }
        for (int i = 0; i < this.seriesList.size(); i++) {
            double[][] d1 = (double[][]) this.seriesList.get(i);
            double[][] d2 = (double[][]) that.seriesList.get(i);
            double[] d1x = d1[0];
            double[] d2x = d2[0];
            if (!Arrays.equals(d1x, d2x)) {
                return false;
            }
            double[] d1y = d1[1];
            double[] d2y = d2[1];
            if (!Arrays.equals(d1y, d2y)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Returns a hash code for this instance.
     *
     * @return A hash code.
     */
    public int hashCode() {
        int result;
        result = this.seriesKeys.hashCode();
        result = 29 * result + this.seriesList.hashCode();
        return result;
    }

    /**
     * Creates an independent copy of this dataset.
     *
     * @return The cloned dataset.
     *
     * @throws CloneNotSupportedException if there is a problem cloning the
     *     dataset (for instance, if a non-cloneable object is used for a
     *     series key).
     */
    public Object clone() throws CloneNotSupportedException {
        DefaultXYDataset clone = (DefaultXYDataset) super.clone();
        clone.seriesKeys = new ArrayList(this.seriesKeys);
        clone.seriesList = new ArrayList(this.seriesList.size());
        // Deep-copy each series' x/y arrays so the clone is fully
        // independent of this dataset.
        for (int i = 0; i < this.seriesList.size(); i++) {
            double[][] data = (double[][]) this.seriesList.get(i);
            double[] x = data[0];
            double[] y = data[1];
            double[] xx = new double[x.length];
            double[] yy = new double[y.length];
            System.arraycopy(x, 0, xx, 0, x.length);
            System.arraycopy(y, 0, yy, 0, y.length);
            clone.seriesList.add(i, new double[][] {xx, yy});
        }
        return clone;
    }
}
/* Copyright 2014 Matthew Rogers "BossLetsPlays"
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.bossletsplays.apophis.entities;

import java.awt.Graphics2D;
import java.awt.Rectangle;

import com.bossletsplays.apophis.Game;
import com.bossletsplays.apophis.gfx.Animation;
import com.bossletsplays.apophis.tilemaps.Tile;
import com.bossletsplays.apophis.tilemaps.TileMap;

/**
 * <strong>Project:</strong> ApophisSpawn <br>
 * <strong>Class:</strong> Entity
 *
 * <p>Base class for anything that lives on the tile map: holds position,
 * velocity, sprite/collision dimensions, and tile-based collision handling.
 * Subclasses provide {@link #update()} and {@link #draw(Graphics2D)}.</p>
 *
 * @author <a href = "http://blp.vc-network.com"> BossLetsPlays</a>
 */
public abstract class Entity {

    protected TileMap tileMap;
    protected int tileSize;
    protected double xMap, yMap;          // map scroll offset (from tileMap)
    protected double x, y;                // entity position (center-based)
    protected double dx, dy;              // velocity components ("Directions")

    protected int sWidth, sHeight;        // sprite width, height
    protected int width, height;          // collision width, height

    protected int currentRow, currentCol; // tile cell currently occupied
    protected double xDest, yDest;        // intended position after movement
    protected double xTemp, yTemp;        // position after collision resolution
    protected boolean topLeft, topRight, bottomLeft, bottomRight; // collision corners

    protected Animation animation;
    protected int currentAction;
    protected int prevAction;

    // movements
    protected boolean facingRight;
    protected boolean left, right, up, down, jumping, falling;
    protected double moveSpeed, maxSpeed, stopSpeed, fallSpeed, maxFallSpeed;
    protected double jumpStart, stopJumpSpeed;

    /**
     * Creates an entity bound to the given tile map.
     *
     * @param tileMap the map this entity moves on (tile size is cached)
     */
    public Entity(TileMap tileMap) {
        this.tileMap = tileMap;
        this.tileSize = tileMap.getTileSize();
    }

    /** Per-frame logic update; implemented by subclasses. */
    public abstract void update();

    /** Subclass-specific drawing hook, invoked from {@link #render(Graphics2D)}. */
    public abstract void draw(Graphics2D g2d);

    /**
     * Renders the entity: refreshes the map offset, calls {@link #draw(Graphics2D)},
     * then draws the current animation frame centered on (x, y). When facing left
     * the frame is drawn with a negative width to mirror it horizontally.
     *
     * @param g2d graphics context to draw on
     */
    public void render(Graphics2D g2d) {
//      if (offScreen()) return;
        setMapPos();
        draw(g2d);
        if (facingRight)
            g2d.drawImage(animation.getImage(), (int) (x + xMap - sWidth / 2),
                    (int) (y + yMap - sHeight / 2), null);
        else
            g2d.drawImage(animation.getImage(), (int) (x + xMap - sWidth / 2 + sWidth),
                    (int) (y + yMap - sHeight / 2), -sWidth, sHeight, null);
    }

    /**
     * Tests whether this entity's bounding rectangle intersects another's.
     *
     * @param e the other entity
     * @return true if the two bounds overlap
     */
    public boolean hasCollisionWith(Entity e) {
        return getBounds().intersects(e.getBounds());
    }

    /**
     * Resolves tile collisions for the pending movement (dx, dy).
     * Vertical and horizontal movement are resolved independently: the corner
     * flags are recomputed for each axis, the velocity component is zeroed on
     * impact, and (xTemp, yTemp) receives the clamped position. Finally, if the
     * entity is standing with no ground one pixel below, it starts falling.
     */
    public void checkTileCollision() {
        currentCol = (int) x / tileSize;
        currentRow = (int) y / tileSize;

        xDest = x + dx;
        yDest = y + dy;

        xTemp = x;
        yTemp = y;

        // --- vertical axis ---
        calculateCorners(x, yDest);
        if (dy < 0) {                       // moving up: check the top corners
            if (topLeft || topRight) {
                dy = 0;
                yTemp = currentRow * tileSize + height / 2;
            } else yTemp += dy;
        }
        if (dy > 0) {                       // moving down: check the bottom corners
            if (bottomLeft || bottomRight) {
                dy = 0;
                falling = false;
                yTemp = (currentRow + 1) * tileSize - height / 2;
            } else yTemp += dy;
        }

        // --- horizontal axis ---
        calculateCorners(xDest, y);
        if (dx < 0) {                       // moving left: check the left corners
            if (topLeft || bottomLeft) {
                dx = 0;
                xTemp = currentCol * tileSize + width / 2;
            } else xTemp += dx;
        }
        if (dx > 0) {                       // moving right: check the right corners
            // BUGFIX: the original tested (topLeft || bottomRight), so a blocked
            // tile at the top-right corner was never detected when moving right.
            // The symmetric pattern with the dx < 0 branch requires the right pair.
            if (topRight || bottomRight) {
                dx = 0;
                xTemp = (currentCol + 1) * tileSize - width / 2;
            } else xTemp += dx;
        }

        // If not currently falling, probe one pixel below; with no ground there,
        // start falling.
        if (!falling) {
            calculateCorners(x, yDest + 1);
            if (!bottomLeft && !bottomRight) falling = true;
        }
    }

    /**
     * Recomputes the four corner-blocked flags for a hypothetical position.
     * Positions whose corner tiles fall outside the map are treated as clear.
     *
     * @param x hypothetical center x
     * @param y hypothetical center y
     */
    protected void calculateCorners(double x, double y) {
        int leftTile = (int) (x - width / 2) / tileSize;
        int rightTile = (int) (x + width / 2 - 1) / tileSize;
        int topTile = (int) (y - height / 2) / tileSize;
        int bottomTile = (int) (y + height / 2 - 1) / tileSize;

        if (topTile < 0 || bottomTile >= tileMap.getRows()
                || leftTile < 0 || rightTile >= tileMap.getCols()) {
            topLeft = topRight = bottomLeft = bottomRight = false;
            return;
        }

        int tl = tileMap.getType(topTile, leftTile);
        int tr = tileMap.getType(topTile, rightTile);
        int bl = tileMap.getType(bottomTile, leftTile);
        int br = tileMap.getType(bottomTile, rightTile);

        topLeft = tl == Tile.BLOCKED;
        topRight = tr == Tile.BLOCKED;
        bottomLeft = bl == Tile.BLOCKED;
        bottomRight = br == Tile.BLOCKED;
    }

    /**
     * Reports whether the entity (plus a margin of its collision size) lies
     * entirely outside the visible game area.
     */
    public boolean offScreen() {
        return x + xMap + width < 0 || x + xMap - width > Game.WIDTH
                || y + yMap + height < 0 || y + yMap - height > Game.HEIGHT;
    }

    /** Sets the entity's position. */
    public void setPos(double x, double y) {
        this.x = x;
        this.y = y;
    }

    /** Sets the entity's velocity vector. */
    public void setVector(double dx, double dy) {
        this.dx = dx;
        this.dy = dy;
    }

    /** Refreshes the cached map scroll offset from the tile map. */
    public void setMapPos() {
        xMap = tileMap.getX();
        yMap = tileMap.getY();
    }

    /**
     * Bounding rectangle used for entity-vs-entity collision.
     * NOTE(review): the top-left corner is offset by the full width/height
     * rather than half of it; since (x, y) is treated as the center elsewhere,
     * this looks like it should be width / 2 and height / 2 — confirm before
     * changing, as both entities in hasCollisionWith use the same convention.
     */
    protected Rectangle getBounds() {
        return new Rectangle((int) x - width, (int) y - height, width, height);
    }

    public double getX() { return x; }
    public double getY() { return y; }
    public int getSpriteWidth() { return sWidth; }
    public int getSpriteHeight() { return sHeight; }
    public int getWidth() { return width; }
    public int getHeight() { return height; }
    public boolean isFacingRight() { return facingRight; }
    public boolean isLeft() { return left; }
    public boolean isRight() { return right; }
    public boolean isUp() { return up; }
    public boolean isDown() { return down; }
    public void setLeft(boolean left) { this.left = left; }
    public void setRight(boolean right) { this.right = right; }
    public void setUp(boolean up) { this.up = up; }
    public void setDown(boolean down) { this.down = down; }
    public boolean isJumping() { return jumping; }
    public boolean isFalling() { return falling; }
    public void setJumping(boolean jumping) { this.jumping = jumping; }
}
/**
 * @file NoFreeOfReturnValue.java
 * @brief NoFreeOfReturnValue class source file
 * @author adarsh.t
 *
 * Copyright 2015 by Samsung Electronics, Inc.
 * All rights reserved.
 *
 * Project Description :
 * This software is the confidential and proprietary information
 * of Samsung Electronics, Inc. ("Confidential Information"). You
 * shall not disclose such Confidential Information and shall use
 * it only in accordance with the terms of the license agreement
 * you entered into with Samsung Electronics.
 */
package com.samsung.sec.dexter.vdcpp.checkerlogic;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.eclipse.cdt.core.dom.ast.ASTVisitor;
import org.eclipse.cdt.core.dom.ast.IASTBinaryExpression;
import org.eclipse.cdt.core.dom.ast.IASTDeclaration;
import org.eclipse.cdt.core.dom.ast.IASTExpression;
import org.eclipse.cdt.core.dom.ast.IASTFileLocation;
import org.eclipse.cdt.core.dom.ast.IASTFunctionCallExpression;
import org.eclipse.cdt.core.dom.ast.IASTFunctionDefinition;
import org.eclipse.cdt.core.dom.ast.IASTIdExpression;
import org.eclipse.cdt.core.dom.ast.IASTInitializerClause;
import org.eclipse.cdt.core.dom.ast.IASTName;
import org.eclipse.cdt.core.dom.ast.IASTNode;
import org.eclipse.cdt.core.dom.ast.IASTSimpleDeclaration;
import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit;
import org.eclipse.cdt.core.dom.ast.IBinding;

import com.samsung.sec.dexter.core.analyzer.AnalysisConfig;
import com.samsung.sec.dexter.core.analyzer.AnalysisResult;
import com.samsung.sec.dexter.core.checker.IChecker;
import com.samsung.sec.dexter.core.defect.PreOccurence;
import com.samsung.sec.dexter.vdcpp.plugin.DexterVdCppPlugin;
import com.samsung.sec.dexter.vdcpp.util.CppUtil;

/**
 * Checker logic that reports a defect when the return value of one of the
 * configured allocation-style methods (the checker's "method-list" property)
 * is assigned to a variable that is never passed to free(). The C++
 * translation unit is walked with Eclipse CDT AST visitors.
 */
public class NoFreeOfReturnValueCheckerLogic implements ICheckerLogic{

	// Kept so that nested helpers (e.g. createPreOccurence) can map a line
	// number back to a class/method name via CppUtil.extractModuleName.
	private IASTTranslationUnit translationUnit;
	// Method names to watch, parsed from the checker's "method-list" property.
	private String[] lstMethods=null;

	/**
	 * Entry point: caches the translation unit, loads the watched method
	 * names, and walks every declaration in the unit with the visitor.
	 */
	@Override
	public void analyze(final AnalysisConfig config, final AnalysisResult result, final IChecker checker,
			IASTTranslationUnit unit) {
		translationUnit =unit;
		lstMethods= checker.getProperty("method-list").split(",");
		ASTVisitor visitor = createVisitor(config, result, checker);
		visitor.shouldVisitDeclarations = true;
		unit.accept(visitor);
	}

	/**
	 * Builds the top-level declaration visitor. Function definitions and
	 * simple (compound/global) declarations are handled by two structurally
	 * identical sub-walks; each looks for calls to a watched method whose
	 * result is assigned and then never freed.
	 */
	private ASTVisitor createVisitor(final AnalysisConfig config,
			final AnalysisResult result, final IChecker checker) {
		ASTVisitor visitor = new ASTVisitor() {
			@Override
			public int visit(IASTDeclaration ast ) {
				if(ast instanceof IASTFunctionDefinition)
				{
					visitFunction(config, result, checker, ast);
				}
				else if(ast instanceof IASTSimpleDeclaration)
				{
					visitOtherCompoundDeclaration(config, result, checker, ast);
				}
				return super.visit(ast);
			}

			/**
			 * Walks expressions inside a non-function declaration, looking
			 * for watched-method calls. Mirrors visitFunction below.
			 */
			private void visitOtherCompoundDeclaration(
					final AnalysisConfig config, final AnalysisResult result,
					final IChecker checker, final IASTDeclaration ast) {
				ASTVisitor visitor = new ASTVisitor() {
					public int visit(IASTExpression astExpression ) {
						if(astExpression instanceof IASTFunctionCallExpression)
						{
							visitFunctionCallExpressionForCompoundBlocks(
									config, result, checker, ast, astExpression);
						}
						return ASTVisitor.PROCESS_CONTINUE;
					}

					/**
					 * Checks whether the call is to one of the watched
					 * methods and, if its parent is a binary expression
					 * (i.e. its result is assigned), inspects the target.
					 */
					private void visitFunctionCallExpressionForCompoundBlocks(
							final AnalysisConfig config,
							final AnalysisResult result,
							final IChecker checker, final IASTDeclaration ast,
							IASTExpression astExpression) {
						IASTExpression functionCallExpression =
								((IASTFunctionCallExpression) astExpression).getFunctionNameExpression();
						// Fall back to the raw signature; prefer the resolved id name.
						String functionName =functionCallExpression.getRawSignature();
						if(functionCallExpression instanceof IASTIdExpression)
						{
							functionName =((IASTIdExpression) functionCallExpression).getName().toString();
						}
						for (String methodName : lstMethods)
						{
							if(functionName.equals(methodName))
							{
								IASTNode node =astExpression.getParent();
								if(node instanceof IASTBinaryExpression)
								{
									visitBinaryExpression(config, result,
											checker, ast, astExpression,
											functionName, node);
								}
							}
						}
					}

					/**
					 * Resolves the assignment target's binding; if no
					 * free(target) call is found anywhere in the unit,
					 * a defect is reported at the call site.
					 */
					private void visitBinaryExpression(
							final AnalysisConfig config,
							final AnalysisResult result,
							final IChecker checker, final IASTDeclaration ast,
							IASTExpression astExpression, String functionName,
							IASTNode node) {
						IASTExpression binaryExpression =((IASTBinaryExpression) node).getOperand1();
						if(binaryExpression instanceof IASTIdExpression)
						{
							IASTName name =((IASTIdExpression) binaryExpression).getName();
							String ExpName =name.toString();
							final IBinding binding = name.resolveBinding();
							if ((binding != null) )
							{
								boolean status= checkforFreeFunctionCall(
										ast, ExpName, binding);
								if(!status)
								{
									fillDefectData( config, result, checker,
											astExpression.getFileLocation(),
											checker.getDescription(), functionName);
								}
							}
						}
					}
				};
				visitor.shouldVisitExpressions = true;
				ast.accept(visitor);
			}

			/**
			 * Walks expressions inside a function definition, looking for
			 * watched-method calls. NOTE(review): structurally identical to
			 * visitOtherCompoundDeclaration above — kept duplicated here to
			 * avoid disturbing the nested anonymous-class structure.
			 */
			private void visitFunction(final AnalysisConfig config,
					final AnalysisResult result, final IChecker checker,
					final IASTDeclaration ast) {
				ASTVisitor visitor = new ASTVisitor() {
					public int visit(IASTExpression astExpression ) {
						if(astExpression instanceof IASTFunctionCallExpression)
						{
							visitFunctionCallExpressionForFunctionBlocks(
									config, result, checker, ast, astExpression);
						}
						return ASTVisitor.PROCESS_CONTINUE;
					}

					/**
					 * Same check as the compound-block variant: watched
					 * method call whose result feeds a binary expression.
					 */
					private void visitFunctionCallExpressionForFunctionBlocks(
							final AnalysisConfig config,
							final AnalysisResult result,
							final IChecker checker, final IASTDeclaration ast,
							IASTExpression astExpression) {
						IASTExpression functionCallExpression =
								((IASTFunctionCallExpression) astExpression).getFunctionNameExpression();
						String functionName =functionCallExpression.getRawSignature();
						if(functionCallExpression instanceof IASTIdExpression)
						{
							functionName =((IASTIdExpression) functionCallExpression).getName().toString();
						}
						for (String methodName : lstMethods)
						{
							if(functionName.equals(methodName))
							{
								IASTNode node =astExpression.getParent();
								if(node instanceof IASTBinaryExpression)
								{
									visitBinaryExpressionForFunctionBlocks(
											config, result, checker, ast,
											astExpression, functionName, node);
								}
							}
						}
					}

					/**
					 * Same target/binding check as the compound-block
					 * variant; reports a defect if no free() is found.
					 */
					private void visitBinaryExpressionForFunctionBlocks(
							final AnalysisConfig config,
							final AnalysisResult result,
							final IChecker checker, final IASTDeclaration ast,
							IASTExpression astExpression, String functionName,
							IASTNode node) {
						IASTExpression binaryExpression =((IASTBinaryExpression) node).getOperand1();
						if(binaryExpression instanceof IASTIdExpression)
						{
							IASTName name =((IASTIdExpression) binaryExpression).getName();
							String ExpName =name.toString();
							final IBinding binding = name.resolveBinding();
							if ((binding != null) )
							{
								boolean status= checkforFreeFunctionCall(
										ast, ExpName, binding);
								if(!status)
								{
									fillDefectData( config, result, checker,
											astExpression.getFileLocation(),
											checker.getDescription(), functionName);
								}
							}
						}
					}
				};
				visitor.shouldVisitExpressions = true;
				ast.accept(visitor);
			}

			/**
			 * Scans every reference to the given binding in the translation
			 * unit; returns true if any reference appears as an argument to
			 * a call named "free". Matching is textual on argument
			 * signatures, so e.g. free(ptr + 1) would not match "ptr".
			 */
			private boolean checkforFreeFunctionCall(final IASTDeclaration ast,
					String ExpName, final IBinding binding) {
				boolean status =false;
				final IASTName[] references = ast.getTranslationUnit().getReferences(binding);
				for (IASTName reference : references)
				{
					// Two levels up: name -> id-expression -> (possible) call.
					IASTNode parent =reference.getParent().getParent();
					if(parent instanceof IASTFunctionCallExpression)
					{
						IASTExpression expression = ((IASTFunctionCallExpression) parent).getFunctionNameExpression();
						IASTInitializerClause[] expParameter =((IASTFunctionCallExpression) parent).getArguments();
						List<String> parameter =new ArrayList<String>();
						for (IASTInitializerClause string : expParameter)
						{
							parameter.add(string.toString());
						}
						if(expression instanceof IASTIdExpression)
						{
							String functionName =((IASTIdExpression) expression).getName().toString();
							if(functionName.equals("free") && parameter.contains(ExpName))
							{
								status =true;
							}
						}
					}
				}
				return status;
			}

			/**
			 * Builds a PreOccurence for the defect and adds it to the result.
			 */
			private void fillDefectData(AnalysisConfig config,
					AnalysisResult result, IChecker checker,
					IASTFileLocation fileLocation, String message,
					String declaratorName) {
				PreOccurence preOcc = createPreOccurence(config, checker,
						fileLocation, message,declaratorName);
				result.addDefectWithPreOccurence(preOcc);
			}

			/**
			 * Populates a PreOccurence with location, module, and checker
			 * metadata. The "${methodName}" placeholder in the message is
			 * substituted with the declarator name; note the final
			 * setMessage(msg) deliberately overwrites the earlier
			 * setMessage(checker.getDescription()).
			 */
			private PreOccurence createPreOccurence(AnalysisConfig config,
					IChecker checker, IASTFileLocation fileLocation,
					String msg,String decName) {
				final int startLine = fileLocation.getStartingLineNumber();
				final int endLine = fileLocation.getEndingLineNumber();
				final int startOffset = fileLocation.getNodeOffset();
				final int endOffset = startOffset + fileLocation.getNodeLength();

				Map<String,String> tempmap =CppUtil.extractModuleName(translationUnit, startLine);
				String className =tempmap.get("className");
				String methodName =tempmap.get("methodName");

				PreOccurence preOcc = new PreOccurence();
				preOcc.setCheckerCode(checker.getCode());
				preOcc.setFileName(config.getFileName());
				preOcc.setModulePath(config.getModulePath());
				preOcc.setClassName(className);
				preOcc.setMethodName(methodName);
				preOcc.setLanguage(config.getLanguageEnum().toString());
				preOcc.setSeverityCode(checker.getSeverityCode());
				preOcc.setMessage(checker.getDescription());
				preOcc.setToolName(DexterVdCppPlugin.PLUGIN_NAME);
				preOcc.setStartLine(startLine);
				preOcc.setEndLine(endLine);
				preOcc.setCharStart(startOffset);
				preOcc.setCharEnd(endOffset);
				preOcc.setVariableName(decName);
				msg =msg.replace("${methodName}", decName);
				preOcc.setMessage(msg);
				preOcc.setStringValue(msg);
				return preOcc;
			}
		};
		return visitor;
	}
}
package engine;

import engine.effects.AbstractBuff;
import graphics.Circle;
import log.IGLog;

import java.awt.*;
import java.awt.geom.Point2D;
import java.util.Collections;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * An entity which allows movements.
 * It has a speed and a direction (in degrees, normalized to [0, 360)).
 */
public abstract class AbstractMovableEntity extends Entity implements Movable {

    /** Fraction of the forward speed applied when moving backwards. */
    private final static double BACKWARD_MODIFIER = 0.5;

    /**
     * Entity's speed.
     */
    private float speed;

    /** Speed multiplier in percent (100 = unmodified speed). */
    private AtomicInteger speedModifier;

    /** Whether the entity currently ignores damage. */
    private AtomicBoolean invulnerable = new AtomicBoolean(false);

    /** Buff types currently applied to this entity; thread-safe set. */
    private Set<Class<?>> buffs = Collections.newSetFromMap(new ConcurrentHashMap<>());

    /**
     * Entity's direction.
     * It's in degrees. It will always be between 0 (inclusive) and 360 (exclusive).
     */
    private float direction;

    /**
     * An empty entity, with null speed and null direction.
     */
    public AbstractMovableEntity() {
        super();
        speed = 0;
        speedModifier = new AtomicInteger(100);
        direction = 0;
    }

    /**
     * Construct an entity with the given speed and direction.
     * @param speed entity's speed
     * @param direction entity's direction
     */
    public AbstractMovableEntity(float speed, float direction) {
        super();
        this.speed = speed;
        speedModifier = new AtomicInteger(100);
        this.direction = direction;
    }

    /**
     * Create an entity with a start point, a size, a speed and a direction.
     * @param startPosition entity's start position
     * @param size entity's size
     * @param speed entity's speed
     * @param direction entity's direction
     */
    public AbstractMovableEntity(Point startPosition, Dimension size, float speed, float direction) {
        super(startPosition, size);
        this.speed = speed;
        speedModifier = new AtomicInteger(100);
        this.direction = direction;
    }

    @Override
    public Object clone() throws CloneNotSupportedException {
        AbstractMovableEntity e = (AbstractMovableEntity) super.clone();
        e.speed = getSpeed();
        e.direction = getDirection();
        // super.clone() is shallow: without the copies below the clone would
        // share the AtomicInteger/AtomicBoolean/buff-set instances with the
        // original, so mutating one entity would silently mutate the other.
        e.speedModifier = new AtomicInteger(speedModifier.get());
        e.invulnerable = new AtomicBoolean(invulnerable.get());
        e.buffs = Collections.newSetFromMap(new ConcurrentHashMap<>());
        e.buffs.addAll(buffs);
        return e;
    }

    /**
     * Return entity's speed.
     * @return entity's speed.
     */
    public float getSpeed() {
        return speed;
    }

    /**
     * Set the new entity's speed.
     * Negative speed is allowed.
     * @param speed the new speed
     */
    public void setSpeed(float speed) {
        this.speed = speed;
    }

    /**
     * Return entity's direction in degrees.
     * @return entity's direction, in [0, 360)
     */
    public float getDirection() {
        return direction;
    }

    /**
     * Add the given delta (in percent points) to the speed modifier.
     * @param modifier delta to apply, may be negative
     */
    public void addSpeedModifier(int modifier) {
        // Use the project logger rather than System.out for consistency with
        // the rest of this class.
        IGLog.info(
                "AbstractMovableEntity -> Entity {" + this + "},"
                        + " addSpeedModifier(" + modifier + "), new value = "
                        + speedModifier.addAndGet(modifier)
        );
    }

    /** @return current speed modifier, in percent. */
    public int getSpeedModifier() {
        return speedModifier.get();
    }

    /** @return true if the entity is currently invulnerable. */
    public boolean getInvulnerable() {
        return invulnerable.get();
    }

    /**
     * Set the invulnerability flag.
     * @param invulnerable new value of the flag
     */
    public void setInvulnerable(boolean invulnerable) {
        IGLog.info(
                "AbstractMovableEntity::setInvulnerable -> Entity {" + this + "}, "
                        + "setInvulnerable(" + invulnerable + ")"
        );
        this.invulnerable.set(invulnerable);
    }

    /**
     * Return entity's direction in Radian.
     * @return entity's direction in Radian.
     */
    public double getDirectionRadian() {
        return Math.toRadians(direction);
    }

    /**
     * Set the new entity's direction.
     * If the direction is not between 0 and 360, it will be recalculated.
     * Negative degrees will be transformed into positive.
     * @param direction new direction, any finite value in degrees
     */
    public void setDirection(float direction) {
        // Double modulo normalizes any finite value into [0, 360),
        // including negatives, without looping.
        this.direction = ((direction % 360) + 360) % 360;
    }

    /**
     * Add the new degree for the entity.
     * Does the same thing as : e.setDirection(e.getDirection() + direction);
     * @param direction delta in degrees
     */
    public void addDirection(float direction) {
        setDirection(getDirection() + direction);
    }

    /**
     * Register a buff type on this entity; only subclasses of
     * {@link AbstractBuff} are accepted.
     * @param c buff class to add
     */
    public void addBuff(Class<?> c) {
        if (AbstractBuff.class.isAssignableFrom(c)) {
            IGLog.info("AbstractMovableEntity::addBuf() -> add " + c.getName());
            buffs.add(c);
        } else {
            IGLog.error("AbstractMovableEntity::addBuff() -> failure...");
        }
    }

    /**
     * Remove a buff type from this entity.
     * @param c buff class to remove
     * @return true if it was present
     */
    public boolean removeBuff(Class<?> c) {
        IGLog.info("AbstractMovableEntity::removeBuff() -> remove " + c.getName());
        return buffs.remove(c);
    }

    /**
     * Check whether a buff type is applied to this entity.
     * @param c buff class to look up
     * @return true if it is present
     */
    public boolean hasBuff(Class<?> c) {
        IGLog.info("AbstractMovableEntity::hasBuf() -> has " + c.getName() + "?");
        return buffs.contains(c);
    }

    @Override
    public void move(Direction d, boolean multiple) {
        double nextX, nextY;
        Point2D position = getPoint();
        double angle = getDirectionRadian();
        Dimension entitySize = getSize();
        // Effective translation speed includes the percent modifier.
        double effectiveSpeed = getSpeed() * (((double) getSpeedModifier()) / 100.);
        // Turning is slower when several directions are pressed at once.
        float angleModifier = !multiple ? 2.f : 0.8f;
        Point2D newPoint;
        Circle newCircle;
        switch (d) {
            case FRONT:
                nextX = position.getX() + (effectiveSpeed * Math.cos(angle));
                nextY = position.getY() + (effectiveSpeed * Math.sin(angle));
                newPoint = new Point2D.Double(nextX, nextY);
                newCircle = new Circle(nextX, nextY, entitySize.getWidth() / 2);
                // Only move when the destination stays in bounds and collision-free.
                if (!getManager().outOfBound(newCircle)
                        && !getManager().hasCrossCollision(this, newCircle))
                    setPoint(newPoint);
                break;
            case BELOW:
                // Backwards movement is slower (BACKWARD_MODIFIER).
                nextX = position.getX() - ((effectiveSpeed * BACKWARD_MODIFIER) * Math.cos(angle));
                nextY = position.getY() - ((effectiveSpeed * BACKWARD_MODIFIER) * Math.sin(angle));
                newPoint = new Point2D.Double(nextX, nextY);
                newCircle = new Circle(nextX, nextY, entitySize.getWidth() / 2);
                if (!getManager().outOfBound(newCircle)
                        && !getManager().hasCrossCollision(this, newCircle)) {
                    setPoint(newPoint);
                }
                break;
            case LEFT:
                // Turn rate scales with the base speed (not the modified speed).
                addDirection(-angleModifier * getSpeed());
                break;
            case RIGHT:
                addDirection(angleModifier * getSpeed());
                break;
            default:
                // Fixed copy-pasted "Glutton::move" class name in the log message.
                IGLog.error("AbstractMovableEntity::move -> Unknown direction.");
                break;
        }
    }

    @Override
    public void translate(double dx, double dy) {
        setPoint(new Point2D.Double(getX() + dx, getY() + dy));
    }

    @Override
    public void moveTo(double x, double y) {
        setPoint(new Point2D.Double(x, y));
    }
}
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flowable.rest.content.service.api;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.util.ISO8601DateFormat;

import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpHeaders;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.message.BasicHeader;
import org.eclipse.jetty.server.Server;
import org.flowable.content.api.ContentItem;
import org.flowable.content.api.ContentService;
import org.flowable.content.engine.ContentEngine;
import org.flowable.content.engine.ContentEngineConfiguration;
import org.flowable.content.engine.impl.test.AbstractContentTestCase;
import org.flowable.content.engine.test.ContentTestHelper;
import org.flowable.rest.content.ContentRestUrlBuilder;
import org.flowable.rest.content.conf.ApplicationConfiguration;
import org.flowable.rest.content.util.TestServerUtil;
import org.flowable.rest.content.util.TestServerUtil.TestServer;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import org.junit.Assert;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;

import junit.framework.AssertionFailedError;

/**
 * Base class for Flowable content REST integration tests: boots an embedded
 * Jetty server once per JVM, exposes the content engine/services, and provides
 * HTTP helpers that track responses so connections are always released.
 */
public abstract class BaseSpringContentRestTestCase extends AbstractContentTestCase {

    private static final Logger LOGGER = LoggerFactory.getLogger(BaseSpringContentRestTestCase.class);

    protected static String SERVER_URL_PREFIX;
    protected static ContentRestUrlBuilder URL_BUILDER;

    protected static Server server;
    protected static ApplicationContext appContext;
    protected ObjectMapper objectMapper = new ObjectMapper();

    protected static ContentEngine contentEngine;

    protected Throwable exception;

    protected static ContentEngineConfiguration contentEngineConfiguration;
    protected static ContentService contentService;

    // Shared, pre-authenticated http client used by every test in the JVM.
    protected static CloseableHttpClient client;
    // Every response returned by executeRequest() is remembered here so that
    // closeHttpConnections() can release leaked connections after each test.
    protected static LinkedList<CloseableHttpResponse> httpResponses = new LinkedList<>();

    protected ISO8601DateFormat ISO_DATE_FORMAT = new ISO8601DateFormat();

    static {
        // Boot the test server and look up engine services once for all tests.
        TestServer testServer = TestServerUtil.createAndStartServer(ApplicationConfiguration.class);
        server = testServer.getServer();
        appContext = testServer.getApplicationContext();
        SERVER_URL_PREFIX = testServer.getServerUrlPrefix();
        URL_BUILDER = ContentRestUrlBuilder.usingBaseUrl(SERVER_URL_PREFIX);

        // Lookup services
        contentEngine = appContext.getBean(ContentEngine.class);
        contentEngineConfiguration = contentEngine.getContentEngineConfiguration();
        contentService = contentEngine.getContentService();

        // Create http client for all tests
        CredentialsProvider provider = new BasicCredentialsProvider();
        UsernamePasswordCredentials credentials = new UsernamePasswordCredentials("kermit", "kermit");
        provider.setCredentials(AuthScope.ANY, credentials);
        client = HttpClientBuilder.create().setDefaultCredentialsProvider(provider).build();

        // Clean shutdown
        Runtime.getRuntime().addShutdownHook(new Thread() {

            @Override
            public void run() {
                if (client != null) {
                    try {
                        client.close();
                    } catch (IOException e) {
                        LOGGER.error("Could not close http client", e);
                    }
                }

                if (server != null && server.isRunning()) {
                    try {
                        server.stop();
                    } catch (Exception e) {
                        LOGGER.error("Error stopping server", e);
                    }
                }
            }
        });
    }

    /**
     * Wraps each test run to log failures, assert a clean database afterwards,
     * reset the engine clock and close any http connections left open.
     */
    @Override
    public void runBare() throws Throwable {
        try {
            super.runBare();
        } catch (AssertionFailedError e) {
            LOGGER.error(EMPTY_LINE);
            LOGGER.error("ASSERTION FAILED: {}", e, e);
            exception = e;
            throw e;
        } catch (Throwable e) {
            LOGGER.error(EMPTY_LINE);
            LOGGER.error("EXCEPTION: {}", e, e);
            exception = e;
            throw e;
        } finally {
            ContentTestHelper.assertAndEnsureCleanDb(contentEngine);
            contentEngineConfiguration.getClock().reset();
            closeHttpConnections();
        }
    }

    /**
     * IMPORTANT: calling method is responsible for calling close() on returned {@link HttpResponse} to free the connection.
     */
    public CloseableHttpResponse executeRequest(HttpUriRequest request, int expectedStatusCode) {
        return internalExecuteRequest(request, expectedStatusCode, true);
    }

    /**
     * IMPORTANT: calling method is responsible for calling close() on returned {@link HttpResponse} to free the connection.
     */
    public CloseableHttpResponse executeBinaryRequest(HttpUriRequest request, int expectedStatusCode) {
        return internalExecuteRequest(request, expectedStatusCode, false);
    }

    /**
     * Executes the request, asserts the status code and records the response
     * for later cleanup.
     *
     * @param request request to execute
     * @param expectedStatusCode status the test expects; asserted
     * @param addJsonContentType when true, defaults Content-Type to application/json if unset
     * @return the response, or never (the assertion fails) on transport errors
     */
    protected CloseableHttpResponse internalExecuteRequest(HttpUriRequest request, int expectedStatusCode, boolean addJsonContentType) {
        CloseableHttpResponse response = null;
        try {
            if (addJsonContentType && request.getFirstHeader(HttpHeaders.CONTENT_TYPE) == null) {
                // Revert to default content-type
                request.addHeader(new BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"));
            }
            response = client.execute(request);
            Assert.assertNotNull(response.getStatusLine());

            int responseStatusCode = response.getStatusLine().getStatusCode();
            if (expectedStatusCode != responseStatusCode) {
                LOGGER.info("Wrong status code : {}, but should be {}", responseStatusCode, expectedStatusCode);
                // Explicit charset: the no-charset IOUtils.toString overload is
                // deprecated and depends on the platform default encoding.
                LOGGER.info("Response body: {}", IOUtils.toString(response.getEntity().getContent(), "utf-8"));
            }

            Assert.assertEquals(expectedStatusCode, responseStatusCode);
            httpResponses.add(response);
            return response;

        } catch (IOException e) {
            // ClientProtocolException is an IOException; one catch covers both
            // previous duplicated handlers.
            Assert.fail(e.getMessage());
        }
        return null;
    }

    /** Closes a single response, failing the test if the connection cannot be released. */
    public void closeResponse(CloseableHttpResponse response) {
        if (response != null) {
            try {
                response.close();
            } catch (IOException e) {
                fail("Could not close http connection");
            }
        }
    }

    /** Closes and forgets every response recorded during the current test. */
    protected void closeHttpConnections() {
        for (CloseableHttpResponse response : httpResponses) {
            if (response != null) {
                try {
                    response.close();
                } catch (IOException e) {
                    LOGGER.error("Could not close http connection", e);
                }
            }
        }
        httpResponses.clear();
    }

    /**
     * URL-encodes the given string as UTF-8; returns null for null input.
     */
    protected String encode(String string) {
        if (string != null) {
            try {
                return URLEncoder.encode(string, "UTF-8");
            } catch (UnsupportedEncodingException uee) {
                throw new IllegalStateException("JVM does not support UTF-8 encoding.", uee);
            }
        }
        return null;
    }

    /**
     * Checks if the returned "data" array (child-node of root-json node returned by invoking a GET on the given url) contains entries with the given ID's.
     */
    protected void assertResultsPresentInDataResponse(String url, String... expectedResourceIds) throws JsonProcessingException, IOException {
        int numberOfResultsExpected = expectedResourceIds.length;

        // Do the actual call
        CloseableHttpResponse response = executeRequest(new HttpGet(SERVER_URL_PREFIX + url), HttpStatus.SC_OK);

        // Check status and size
        JsonNode dataNode = objectMapper.readTree(response.getEntity().getContent()).get("data");
        closeResponse(response);
        assertEquals(numberOfResultsExpected, dataNode.size());

        // Check presence of ID's
        List<String> toBeFound = new ArrayList<>(Arrays.asList(expectedResourceIds));
        Iterator<JsonNode> it = dataNode.iterator();
        while (it.hasNext()) {
            String id = it.next().get("id").textValue();
            toBeFound.remove(id);
        }
        assertTrue("Not all expected ids have been found in result, missing: " + StringUtils.join(toBeFound, ", "), toBeFound.isEmpty());
    }

    /** POST variant of {@link #assertResultsPresentInDataResponse(String, String...)} expecting HTTP 200. */
    protected void assertResultsPresentInPostDataResponse(String url, ObjectNode body, String... expectedResourceIds) throws JsonProcessingException, IOException {
        assertResultsPresentInPostDataResponseWithStatusCheck(url, body, HttpStatus.SC_OK, expectedResourceIds);
    }

    /**
     * Performs a POST with the given JSON body and, when the expected status is
     * 200, asserts the "data" array contains exactly the given resource ids.
     */
    protected void assertResultsPresentInPostDataResponseWithStatusCheck(String url, ObjectNode body, int expectedStatusCode, String... expectedResourceIds) throws JsonProcessingException, IOException {
        int numberOfResultsExpected = 0;
        if (expectedResourceIds != null) {
            numberOfResultsExpected = expectedResourceIds.length;
        }

        // Do the actual call
        HttpPost post = new HttpPost(SERVER_URL_PREFIX + url);
        post.setEntity(new StringEntity(body.toString()));
        CloseableHttpResponse response = executeRequest(post, expectedStatusCode);

        if (expectedStatusCode == HttpStatus.SC_OK) {
            // Check status and size
            JsonNode rootNode = objectMapper.readTree(response.getEntity().getContent());
            JsonNode dataNode = rootNode.get("data");
            assertEquals(numberOfResultsExpected, dataNode.size());

            // Check presence of ID's
            if (expectedResourceIds != null) {
                List<String> toBeFound = new ArrayList<>(Arrays.asList(expectedResourceIds));
                Iterator<JsonNode> it = dataNode.iterator();
                while (it.hasNext()) {
                    String id = it.next().get("id").textValue();
                    toBeFound.remove(id);
                }
                assertTrue("Not all entries have been found in result, missing: " + StringUtils.join(toBeFound, ", "), toBeFound.isEmpty());
            }
        }

        closeResponse(response);
    }

    /** Asserts a GET on the given url returns an empty "data" array. */
    protected void assertEmptyResultsPresentInDataResponse(String url) throws JsonProcessingException, IOException {
        // Do the actual call
        CloseableHttpResponse response = executeRequest(new HttpGet(SERVER_URL_PREFIX + url), HttpStatus.SC_OK);

        // Check status and size
        JsonNode dataNode = objectMapper.readTree(response.getEntity().getContent()).get("data");
        closeResponse(response);
        assertEquals(0, dataNode.size());
    }

    /**
     * Extract a date from the given string. Assertion fails when invalid date has been provided.
     */
    protected Date getDateFromISOString(String isoString) {
        DateTimeFormatter dateFormat = ISODateTimeFormat.dateTime();
        try {
            return dateFormat.parseDateTime(isoString).toDate();
        } catch (IllegalArgumentException iae) {
            fail("Illegal date provided: " + isoString);
            return null;
        }
    }

    /** Formats a date as an ISO-8601 string. */
    protected String getISODateString(Date time) {
        return ISO_DATE_FORMAT.format(time);
    }

    /** Creates and persists a content item without binary content; returns its id. */
    protected String createContentItem(String name, String mimeType, String taskId, String processInstanceId, String caseId,
            String tenantId, String createdBy, String lastModifiedBy) {

        ContentItem contentItem = fillAndCreateContentItem(name, mimeType, taskId, processInstanceId, caseId, tenantId, createdBy, lastModifiedBy);
        contentService.saveContentItem(contentItem);
        return contentItem.getId();
    }

    /** Creates and persists a content item with the given binary stream; returns its id. */
    protected String createContentItem(String name, String mimeType, String taskId, String processInstanceId, String caseId,
            String tenantId, String createdBy, String lastModifiedBy, InputStream fileStream) {

        ContentItem contentItem = fillAndCreateContentItem(name, mimeType, taskId, processInstanceId, caseId, tenantId, createdBy, lastModifiedBy);
        contentService.saveContentItem(contentItem, fileStream);
        return contentItem.getId();
    }

    /**
     * Builds (without saving) a new content item; a non-empty caseId marks the
     * item as CMMN-scoped.
     */
    protected ContentItem fillAndCreateContentItem(String name, String mimeType, String taskId, String processInstanceId, String caseId,
            String tenantId, String createdBy, String lastModifiedBy) {

        ContentItem contentItem = contentService.newContentItem();
        contentItem.setName(name);
        contentItem.setMimeType(mimeType);
        contentItem.setTaskId(taskId);
        contentItem.setProcessInstanceId(processInstanceId);
        if (StringUtils.isNotEmpty(caseId)) {
            contentItem.setScopeType("cmmn");
            contentItem.setScopeId(caseId);
        }
        contentItem.setTenantId(tenantId);
        contentItem.setCreatedBy(createdBy);
        contentItem.setLastModifiedBy(lastModifiedBy);
        return contentItem;
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper.core;

import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;

import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import static org.apache.lucene.index.IndexOptions.NONE;
import static org.elasticsearch.index.mapper.MapperBuilders.stringField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseTextField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;

/**
 * Mapper for the "string" field type: parses string values from source
 * documents into Lucene fields, optionally contributing to the _all field,
 * honoring null_value, ignore_above and position_increment_gap settings.
 */
public class StringFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll {

    public static final String CONTENT_TYPE = "string";
    // Sentinel: defer to the analyzer's own position increment gap.
    private static final int POSITION_INCREMENT_GAP_USE_ANALYZER = -1;

    /** Default settings for string fields. */
    public static class Defaults {
        public static final MappedFieldType FIELD_TYPE = new StringFieldType();

        static {
            FIELD_TYPE.freeze();
        }

        // NOTE, when adding defaults here, make sure you add them in the builder
        public static final String NULL_VALUE = null;

        /**
         * Post 2.0 default for position_increment_gap. Set to 100 so that
         * phrase queries of reasonably high slop will not match across field
         * values.
         */
        public static final int POSITION_INCREMENT_GAP = 100;
        public static final int POSITION_INCREMENT_GAP_PRE_2_0 = 0;
        // ignore_above disabled by default (-1 = no length limit).
        public static final int IGNORE_ABOVE = -1;

        /**
         * The default position_increment_gap for a particular version of Elasticsearch.
         */
        public static int positionIncrementGap(Version version) {
            if (version.before(Version.V_2_0_0_beta1)) {
                return POSITION_INCREMENT_GAP_PRE_2_0;
            }
            return POSITION_INCREMENT_GAP;
        }
    }

    /** Fluent builder for {@link StringFieldMapper} instances. */
    public static class Builder extends FieldMapper.Builder<Builder, StringFieldMapper> {
        protected String nullValue = Defaults.NULL_VALUE;

        /**
         * The distance between tokens from different values in the same field.
         * POSITION_INCREMENT_GAP_USE_ANALYZER means default to the analyzer's
         * setting which in turn defaults to Defaults.POSITION_INCREMENT_GAP.
         */
        protected int positionIncrementGap = POSITION_INCREMENT_GAP_USE_ANALYZER;

        protected int ignoreAbove = Defaults.IGNORE_ABOVE;

        public Builder(String name) {
            super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
            builder = this;
        }

        @Override
        public Builder searchAnalyzer(NamedAnalyzer searchAnalyzer) {
            super.searchAnalyzer(searchAnalyzer);
            return this;
        }

        public Builder positionIncrementGap(int positionIncrementGap) {
            this.positionIncrementGap = positionIncrementGap;
            return this;
        }

        public Builder searchQuotedAnalyzer(NamedAnalyzer analyzer) {
            this.fieldType.setSearchQuoteAnalyzer(analyzer);
            return builder;
        }

        public Builder ignoreAbove(int ignoreAbove) {
            this.ignoreAbove = ignoreAbove;
            return this;
        }

        @Override
        public StringFieldMapper build(BuilderContext context) {
            // if the field is not analyzed, then by default, we should omit norms and have docs only
            // index options, as probably what the user really wants
            // if they are set explicitly, we will use those values
            // we also change the values on the default field type so that toXContent emits what
            // differs from the defaults
            if (fieldType.indexOptions() != IndexOptions.NONE && !fieldType.tokenized()) {
                defaultFieldType.setOmitNorms(true);
                defaultFieldType.setIndexOptions(IndexOptions.DOCS);
                if (!omitNormsSet && fieldType.boost() == 1.0f) {
                    fieldType.setOmitNorms(true);
                }
                if (!indexOptionsSet) {
                    fieldType.setIndexOptions(IndexOptions.DOCS);
                }
            }
            if (positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) {
                // An explicit gap only makes sense when positions are indexed.
                if (fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) {
                    throw new IllegalArgumentException("Cannot set position_increment_gap on field ["
                        + name + "] without positions enabled");
                }
                // Wrap all three analyzers so they apply the configured gap.
                fieldType.setIndexAnalyzer(new NamedAnalyzer(fieldType.indexAnalyzer(), positionIncrementGap));
                fieldType.setSearchAnalyzer(new NamedAnalyzer(fieldType.searchAnalyzer(), positionIncrementGap));
                fieldType.setSearchQuoteAnalyzer(new NamedAnalyzer(fieldType.searchQuoteAnalyzer(), positionIncrementGap));
            }
            setupFieldType(context);
            StringFieldMapper fieldMapper = new StringFieldMapper(
                    name, fieldType, defaultFieldType, positionIncrementGap, ignoreAbove,
                    context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
            return fieldMapper.includeInAll(includeInAll);
        }
    }

    /** Parses the "string" mapping definition into a {@link Builder}. */
    public static class TypeParser implements Mapper.TypeParser {
        @Override
        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            StringFieldMapper.Builder builder = stringField(name);
            parseTextField(builder, name, node, parserContext);
            // Consume (and remove) every property this mapper understands;
            // anything left in `node` is rejected by the caller.
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String propName = Strings.toUnderscoreCase(entry.getKey());
                Object propNode = entry.getValue();
                if (propName.equals("null_value")) {
                    if (propNode == null) {
                        throw new MapperParsingException("Property [null_value] cannot be null.");
                    }
                    builder.nullValue(propNode.toString());
                    iterator.remove();
                } else if (propName.equals("search_quote_analyzer")) {
                    NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
                    if (analyzer == null) {
                        throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
                    }
                    builder.searchQuotedAnalyzer(analyzer);
                    iterator.remove();
                } else if (propName.equals("position_increment_gap") ||
                    // "position_offset_gap" is the pre-2.0 name of the same setting.
                    parserContext.indexVersionCreated().before(Version.V_2_0_0) && propName.equals("position_offset_gap")) {
                    int newPositionIncrementGap = XContentMapValues.nodeIntegerValue(propNode, -1);
                    if (newPositionIncrementGap < 0) {
                        throw new MapperParsingException("positions_increment_gap less than 0 aren't allowed.");
                    }
                    builder.positionIncrementGap(newPositionIncrementGap);
                    // we need to update to actual analyzers if they are not set in this case...
                    // so we can inject the position increment gap...
                    if (builder.fieldType().indexAnalyzer() == null) {
                        builder.fieldType().setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer());
                    }
                    if (builder.fieldType().searchAnalyzer() == null) {
                        builder.fieldType().setSearchAnalyzer(parserContext.analysisService().defaultSearchAnalyzer());
                    }
                    if (builder.fieldType().searchQuoteAnalyzer() == null) {
                        builder.fieldType().setSearchQuoteAnalyzer(parserContext.analysisService().defaultSearchQuoteAnalyzer());
                    }
                    iterator.remove();
                } else if (propName.equals("ignore_above")) {
                    builder.ignoreAbove(XContentMapValues.nodeIntegerValue(propNode, -1));
                    iterator.remove();
                } else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
                    iterator.remove();
                }
            }
            return builder;
        }
    }

    /** Field type for string fields; values are kept as-is (toString). */
    public static final class StringFieldType extends MappedFieldType implements org.elasticsearch.index.mapper.StringFieldType {

        public StringFieldType() {}

        protected StringFieldType(StringFieldType ref) {
            super(ref);
        }

        public StringFieldType clone() {
            return new StringFieldType(this);
        }

        @Override
        public String typeName() {
            return CONTENT_TYPE;
        }

        @Override
        public String value(Object value) {
            if (value == null) {
                return null;
            }
            return value.toString();
        }

        @Override
        public Query nullValueQuery() {
            if (nullValue() == null) {
                return null;
            }
            return termQuery(nullValue(), null);
        }
    }

    // Tri-state: null = unset, otherwise explicit include_in_all flag.
    private Boolean includeInAll;
    private int positionIncrementGap;
    private int ignoreAbove;

    protected StringFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
                                int positionIncrementGap, int ignoreAbove, Settings indexSettings,
                                MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
        // Analyzed string fields cannot also carry (sorted-set) doc values.
        if (fieldType.tokenized() && fieldType.indexOptions() != NONE && fieldType().hasDocValues()) {
            throw new MapperParsingException("Field [" + fieldType.names().fullName() + "] cannot be analyzed and have doc values");
        }
        this.positionIncrementGap = positionIncrementGap;
        this.ignoreAbove = ignoreAbove;
    }

    @Override
    protected StringFieldMapper clone() {
        return (StringFieldMapper) super.clone();
    }

    // The includeInAll mutators below are copy-on-write: they return a clone
    // with the new flag rather than mutating this mapper in place.
    @Override
    public StringFieldMapper includeInAll(Boolean includeInAll) {
        if (includeInAll != null) {
            StringFieldMapper clone = clone();
            clone.includeInAll = includeInAll;
            return clone;
        } else {
            return this;
        }
    }

    @Override
    public StringFieldMapper includeInAllIfNotSet(Boolean includeInAll) {
        if (includeInAll != null && this.includeInAll == null) {
            StringFieldMapper clone = clone();
            clone.includeInAll = includeInAll;
            return clone;
        } else {
            return this;
        }
    }

    @Override
    public StringFieldMapper unsetIncludeInAll() {
        if (includeInAll != null) {
            StringFieldMapper clone = clone();
            clone.includeInAll = null;
            return clone;
        } else {
            return this;
        }
    }

    @Override
    protected boolean customBoost() {
        return true;
    }

    public int getPositionIncrementGap() {
        return this.positionIncrementGap;
    }

    public int getIgnoreAbove() {
        return ignoreAbove;
    }

    @Override
    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
        ValueAndBoost valueAndBoost = parseCreateFieldForString(context, fieldType().nullValueAsString(), fieldType().boost());
        if (valueAndBoost.value() == null) {
            return;
        }
        // ignore_above: silently drop values longer than the configured limit.
        if (ignoreAbove > 0 && valueAndBoost.value().length() > ignoreAbove) {
            return;
        }
        if (context.includeInAll(includeInAll, this)) {
            context.allEntries().addText(fieldType().names().fullName(), valueAndBoost.value(), valueAndBoost.boost());
        }
        if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
            Field field = new Field(fieldType().names().indexName(), valueAndBoost.value(), fieldType());
            field.setBoost(valueAndBoost.boost());
            fields.add(field);
        }
        if (fieldType().hasDocValues()) {
            fields.add(new SortedSetDocValuesField(fieldType().names().indexName(), new BytesRef(valueAndBoost.value())));
        }
    }

    /**
     * Parse a field as though it were a string.
     * @param context parse context used during parsing
     * @param nullValue value to use for null
     * @param defaultBoost default boost value returned unless overwritten in the field
     * @return the parsed field and the boost either parsed or defaulted
     * @throws IOException if thrown while parsing
     */
    public static ValueAndBoost parseCreateFieldForString(ParseContext context, String nullValue, float defaultBoost) throws IOException {
        if (context.externalValueSet()) {
            return new ValueAndBoost(context.externalValue().toString(), defaultBoost);
        }
        XContentParser parser = context.parser();
        if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
            return new ValueAndBoost(nullValue, defaultBoost);
        }
        // Object form: { "value": ..., "boost": ... } (legacy "_value"/"_boost" too).
        if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
            XContentParser.Token token;
            String currentFieldName = null;
            String value = nullValue;
            float boost = defaultBoost;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else {
                    if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) {
                        value = parser.textOrNull();
                    } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) {
                        boost = parser.floatValue();
                    } else {
                        throw new IllegalArgumentException("unknown property [" + currentFieldName + "]");
                    }
                }
            }
            return new ValueAndBoost(value, boost);
        }
        return new ValueAndBoost(parser.textOrNull(), defaultBoost);
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }

    @Override
    protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
        super.doMerge(mergeWith, updateAllTypes);
        // NOTE(review): positionIncrementGap is intentionally not merged here;
        // it is carried by the (merged) field type's analyzers — confirm.
        this.includeInAll = ((StringFieldMapper) mergeWith).includeInAll;
        this.ignoreAbove = ((StringFieldMapper) mergeWith).ignoreAbove;
    }

    @Override
    protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
        super.doXContentBody(builder, includeDefaults, params);
        doXContentAnalyzers(builder, includeDefaults);

        if (includeDefaults || fieldType().nullValue() != null) {
            builder.field("null_value", fieldType().nullValue());
        }
        if (includeInAll != null) {
            builder.field("include_in_all", includeInAll);
        } else if (includeDefaults) {
            builder.field("include_in_all", false);
        }

        if (includeDefaults || positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) {
            builder.field("position_increment_gap", positionIncrementGap);
        }

        if (includeDefaults || ignoreAbove != Defaults.IGNORE_ABOVE) {
            builder.field("ignore_above", ignoreAbove);
        }
    }

    /**
     * Parsed value and boost to be returned from {@link #parseCreateFieldForString}.
     */
    public static class ValueAndBoost {
        private final String value;
        private final float boost;

        public ValueAndBoost(String value, float boost) {
            this.value = value;
            this.boost = boost;
        }

        /**
         * Value of string field.
         * @return value of string field
         */
        public String value() {
            return value;
        }

        /**
         * Boost either parsed from the document or defaulted.
         * @return boost either parsed from the document or defaulted
         */
        public float boost() {
            return boost;
        }
    }
}
package com.windowsazure.messaging.e2e;

import static org.junit.Assert.*;

import java.util.List;
import java.util.Properties;
import java.util.UUID;

import org.junit.Before;
import org.junit.Test;

import com.windowsazure.messaging.*;

/**
 * End-to-end CRUD tests for Notification Hub management, one test per PNS
 * credential type (GCM, ADM, APNS, MPNS, WNS, Baidu). Each test creates a hub
 * with a unique path, reads it back, updates it, verifies it appears in the
 * namespace listing, then deletes it and verifies it is gone.
 *
 * Credentials are loaded from e2eSetup.properties; a test asserts up-front
 * that the credentials it needs are present.
 */
public class HubCrudsE2E {

    private String connectionString;
    private String hubPath;
    private String gcmkey;
    private String admid;
    private String admsecret;
    private String apnscert;
    private String apnskey;
    private String mpnscert;
    private String mpnskey;
    private String winsid;
    private String winkey;
    private String baidukey;
    private String baidusecret;

    @Before
    public void setUp() throws Exception {
        Properties p = new Properties();
        p.load(this.getClass().getResourceAsStream("e2eSetup.properties"));
        connectionString = p.getProperty("connectionstring");
        assertTrue(connectionString != null && !connectionString.isEmpty());
        gcmkey = p.getProperty("gcmkey");
        admid = p.getProperty("admid");
        admsecret = p.getProperty("admsecret");
        apnscert = p.getProperty("apnscert");
        apnskey = p.getProperty("apnskey");
        mpnscert = p.getProperty("mpnscert");
        mpnskey = p.getProperty("mpnskey");
        winsid = p.getProperty("winsid");
        winkey = p.getProperty("winkey");
        baidukey = p.getProperty("baidukey");
        baidusecret = p.getProperty("baidusecret");
        // Unique path per run so parallel or previously-failed runs cannot collide.
        hubPath = "JavaSDK_" + UUID.randomUUID().toString();
    }

    // ---------------------------------------------------------------------
    // Shared helpers (the original repeated this logic verbatim in each test)
    // ---------------------------------------------------------------------

    /** Asserts that the hub created by the current test appears in the namespace listing. */
    private void assertHubListed(NamespaceManager nsm) throws Exception {
        List<NotificationHubDescription> hubs = nsm.getNotificationHubs();
        assertTrue(hubs.size() > 0);
        boolean found = false;
        for (NotificationHubDescription h : hubs) {
            found |= h.getPath().equalsIgnoreCase(hubPath);
        }
        assertTrue(found);
    }

    /**
     * Deletes the hub and asserts it no longer appears in the listing.
     *
     * BUG FIX: the original compared paths with {@code h.getPath() == hubPath}
     * (reference equality). Paths returned by the service are distinct String
     * instances, so the comparison was always false and assertFalse(found)
     * passed vacuously — a failed delete was never detected. We now use
     * equalsIgnoreCase, consistent with the "found" check above.
     */
    private void deleteHubAndAssertGone(NamespaceManager nsm) throws Exception {
        nsm.deleteNotificationHub(hubPath);
        Thread.sleep(1000); // allow the service to propagate the delete
        boolean found = false;
        for (NotificationHubDescription h : nsm.getNotificationHubs()) {
            found |= h.getPath().equalsIgnoreCase(hubPath);
        }
        assertFalse(found);
    }

    // Per-credential assertion helpers: hub exists and round-tripped the credential.

    private void assertGcm(NotificationHubDescription hub) {
        assertNotNull(hub);
        assertNotNull(hub.getGcmCredential());
        assertEquals(gcmkey, hub.getGcmCredential().getGoogleApiKey());
    }

    private void assertAdm(NotificationHubDescription hub) {
        assertNotNull(hub);
        assertNotNull(hub.getAdmCredential());
        assertEquals(admid, hub.getAdmCredential().getClientId());
        assertEquals(admsecret, hub.getAdmCredential().getClientSecret());
    }

    private void assertApns(NotificationHubDescription hub) {
        assertNotNull(hub);
        assertNotNull(hub.getApnsCredential());
        assertEquals(apnscert, hub.getApnsCredential().getApnsCertificate());
        assertEquals(apnskey, hub.getApnsCredential().getCertificateKey());
    }

    private void assertMpns(NotificationHubDescription hub) {
        assertNotNull(hub);
        assertNotNull(hub.getMpnsCredential());
        assertEquals(mpnscert, hub.getMpnsCredential().getMpnsCertificate());
        assertEquals(mpnskey, hub.getMpnsCredential().getCertificateKey());
    }

    private void assertWns(NotificationHubDescription hub) {
        assertNotNull(hub);
        assertNotNull(hub.getWindowsCredential());
        assertEquals(winsid, hub.getWindowsCredential().getPackageSid());
        assertEquals(winkey, hub.getWindowsCredential().getSecretKey());
    }

    private void assertBaidu(NotificationHubDescription hub) {
        assertNotNull(hub);
        assertNotNull(hub.getBaiduCredential());
        assertEquals(baidukey, hub.getBaiduCredential().getBaiduApiKey());
        assertEquals(baidusecret, hub.getBaiduCredential().getBaiduSecretKey());
    }

    // ---------------------------------------------------------------------
    // Tests
    // ---------------------------------------------------------------------

    @Test
    public void GcmCrudsTest() throws Exception {
        assertTrue(gcmkey != null && !gcmkey.isEmpty());
        NamespaceManager nsm = new NamespaceManager(connectionString);

        // Create new
        NotificationHubDescription hub = new NotificationHubDescription(hubPath);
        hub.setGcmCredential(new GcmCredential(gcmkey));
        hub = nsm.createNotificationHub(hub);
        Thread.sleep(1000);
        assertGcm(hub);

        // Get by path
        hub = nsm.getNotificationHub(hubPath);
        assertGcm(hub);

        // Update
        hub = nsm.updateNotificationHub(hub);
        assertGcm(hub);

        // Get collection
        assertHubListed(nsm);

        // Delete
        deleteHubAndAssertGone(nsm);
    }

    @Test
    public void AdmCrudsTest() throws Exception {
        assertTrue(admid != null && !admid.isEmpty() && admsecret != null && !admsecret.isEmpty());
        NamespaceManager nsm = new NamespaceManager(connectionString);

        // Create new
        NotificationHubDescription hub = new NotificationHubDescription(hubPath);
        hub.setAdmCredential(new AdmCredential(admid, admsecret));
        hub = nsm.createNotificationHub(hub);
        Thread.sleep(1000);
        assertAdm(hub);

        // Get by path
        hub = nsm.getNotificationHub(hubPath);
        assertAdm(hub);

        // Update
        hub = nsm.updateNotificationHub(hub);
        assertAdm(hub);

        // Get collection
        assertHubListed(nsm);

        // Delete
        deleteHubAndAssertGone(nsm);
    }

    @Test
    public void ApnsCrudsTest() throws Exception {
        assertTrue(apnscert != null && !apnscert.isEmpty() && apnskey != null && !apnskey.isEmpty());
        NamespaceManager nsm = new NamespaceManager(connectionString);

        // Create new
        NotificationHubDescription hub = new NotificationHubDescription(hubPath);
        hub.setApnsCredential(new ApnsCredential(apnscert, apnskey));
        hub = nsm.createNotificationHub(hub);
        Thread.sleep(1000);
        assertApns(hub);

        // Get by path
        hub = nsm.getNotificationHub(hubPath);
        assertApns(hub);

        // Update
        hub = nsm.updateNotificationHub(hub);
        assertApns(hub);

        // Get collection
        assertHubListed(nsm);

        // Delete
        deleteHubAndAssertGone(nsm);
    }

    @Test
    public void MpnsCrudsTest() throws Exception {
        assertTrue(mpnscert != null && !mpnscert.isEmpty() && mpnskey != null && !mpnskey.isEmpty());
        NamespaceManager nsm = new NamespaceManager(connectionString);

        // Create new
        NotificationHubDescription hub = new NotificationHubDescription(hubPath);
        hub.setMpnsCredential(new MpnsCredential(mpnscert, mpnskey));
        hub = nsm.createNotificationHub(hub);
        Thread.sleep(1000);
        assertMpns(hub);

        // Get by path
        hub = nsm.getNotificationHub(hubPath);
        assertMpns(hub);

        // Update
        hub = nsm.updateNotificationHub(hub);
        assertMpns(hub);

        // Get collection
        assertHubListed(nsm);

        // Delete
        deleteHubAndAssertGone(nsm);
    }

    @Test
    public void WnsCrudsTest() throws Exception {
        assertTrue(winsid != null && !winsid.isEmpty() && winkey != null && !winkey.isEmpty());
        NamespaceManager nsm = new NamespaceManager(connectionString);

        // Create new
        NotificationHubDescription hub = new NotificationHubDescription(hubPath);
        hub.setWindowsCredential(new WindowsCredential(winsid, winkey));
        hub = nsm.createNotificationHub(hub);
        Thread.sleep(1000);
        assertWns(hub);

        // Get by path
        hub = nsm.getNotificationHub(hubPath);
        assertWns(hub);

        // Update
        hub = nsm.updateNotificationHub(hub);
        assertWns(hub);

        // Get collection
        assertHubListed(nsm);

        // Delete
        deleteHubAndAssertGone(nsm);
    }

    @Test
    public void BaiduCrudsTest() throws Exception {
        assertTrue(baidukey != null && !baidukey.isEmpty() && baidusecret != null && !baidusecret.isEmpty());
        NamespaceManager nsm = new NamespaceManager(connectionString);

        // Create new
        NotificationHubDescription hub = new NotificationHubDescription(hubPath);
        hub.setBaiduCredential(new BaiduCredential(baidukey, baidusecret));
        hub = nsm.createNotificationHub(hub);
        Thread.sleep(1000);
        assertBaidu(hub);

        // Get by path
        hub = nsm.getNotificationHub(hubPath);
        assertBaidu(hub);

        // Update
        hub = nsm.updateNotificationHub(hub);
        assertBaidu(hub);

        // Get collection
        assertHubListed(nsm);

        // Delete
        deleteHubAndAssertGone(nsm);
    }

    /**
     * Verifies that a hub created with only a Baidu credential can later be
     * updated to carry an ADM credential as well, without losing the original.
     */
    @Test
    public void CreateWithBaiduThenAddAdmTest() throws Exception {
        assertTrue(admid != null && !admid.isEmpty() && admsecret != null && !admsecret.isEmpty());
        assertTrue(baidukey != null && !baidukey.isEmpty() && baidusecret != null && !baidusecret.isEmpty());
        NamespaceManager nsm = new NamespaceManager(connectionString);

        NotificationHubDescription hub = new NotificationHubDescription(hubPath);
        hub.setBaiduCredential(new BaiduCredential(baidukey, baidusecret));
        hub = nsm.createNotificationHub(hub);
        Thread.sleep(1000);
        assertBaidu(hub);

        Thread.sleep(1000);
        hub = nsm.getNotificationHub(hubPath);
        assertBaidu(hub);

        // Add the second credential and push the update.
        hub.setAdmCredential(new AdmCredential(admid, admsecret));
        nsm.updateNotificationHub(hub);
        Thread.sleep(1000);

        // Both credentials must now be present on the stored hub.
        hub = nsm.getNotificationHub(hubPath);
        assertBaidu(hub);
        assertAdm(hub);

        nsm.deleteNotificationHub(hubPath);
    }
}
package intervaltree;

import java.util.Collection;
import java.util.Map.Entry;
import java.util.Set;

/**
 * Leaf node of an interval tree: holds exactly one {@link Interval} key and
 * its associated value. A leaf also serves directly as the {@link Entry}
 * exposed through {@code entrySet()}.
 *
 * @param <K> endpoint type of the intervals (must be mutually comparable)
 * @param <V> value type stored against each interval
 */
class IntervalTreeLeaf<K extends Comparable<? super K>, V> implements IntervalTreeNode<K, V>, Entry<Interval<K>, V> {

    private final Interval<K> key;
    private V value; // mutable only through setValue()

    IntervalTreeLeaf(K min, K max, V value) {
        this(new Interval<K>(min, max), value);
    }

    public IntervalTreeLeaf(Interval<K> key, V value) {
        this.key = key;
        this.value = value;
    }

    @Override
    public boolean isLeaf() {
        return true;
    }

    /** Leaves have no children; both child accessors are unsupported. */
    @Override
    public IntervalTreeNode<K, V> getLeft() {
        throw new UnsupportedOperationException();
    }

    @Override
    public IntervalTreeNode<K, V> getRight() {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean contains(K point) {
        return key.contains(point);
    }

    @Override
    public boolean contains(Interval<K> interval) {
        return key.contains(interval);
    }

    @Override
    public boolean overlaps(K low, K high) {
        return key.overlaps(low, high);
    }

    @Override
    public boolean overlaps(Interval<K> interval) {
        return key.overlaps(interval);
    }

    @Override
    public K getLow() {
        return key.getLow();
    }

    @Override
    public K getHigh() {
        return key.getHigh();
    }

    @Override
    public V getValue() {
        return value;
    }

    /**
     * Inserting into a leaf grows it into a branch that holds both entries,
     * ordered left/right by the low endpoint of their intervals.
     */
    @Override
    public IntervalTreeNode<K, V> put(Interval<K> key, V value) {
        IntervalTreeNode<K, V> putNode = new IntervalTreeLeaf<K, V>(key, value);
        if (this.key.getLow().compareTo(key.getLow()) < 0) {
            return new IntervalTreeBranch<K, V>(this, putNode);
        } else {
            return new IntervalTreeBranch<K, V>(putNode, this);
        }
    }

    @Override
    public void searchOverlapping(Interval<K> range, Collection<V> accumulator) {
        if (range.overlaps(key)) {
            accumulator.add(getValue());
        }
    }

    @Override
    public void searchContaining(Interval<K> range, Collection<V> accumulator) {
        if (key.contains(range)) {
            accumulator.add(getValue());
        }
    }

    @Override
    public int size() {
        return 1;
    }

    @Override
    public void values(Collection<V> accumulator) {
        accumulator.add(getValue());
    }

    /** Removal from a leaf either deletes it (returns null) or leaves it unchanged. */
    @Override
    public IntervalTreeNode<K, V> remove(V value) {
        if (value.equals(getValue())) {
            return null;
        } else {
            return this;
        }
    }

    @Override
    public void entrySet(Set<Entry<Interval<K>, V>> accumulator) {
        accumulator.add(this);
    }

    @Override
    public Interval<K> getKey() {
        return key;
    }

    /**
     * Replaces this entry's value.
     *
     * BUG FIX: the original did {@code V ret = value;} (the parameter) and so
     * returned the NEW value. The {@link Entry#setValue} contract requires
     * returning the value previously stored in the entry.
     *
     * @return the old value that was replaced
     */
    @Override
    public V setValue(V value) {
        V old = this.value;
        this.value = value;
        return old;
    }

    @Override
    public boolean containsValue(V value) {
        return getValue().equals(value);
    }

    @Override
    public void keySet(Set<Interval<K>> accumulator) {
        accumulator.add(key);
    }

    @Override
    public boolean containedBy(Interval<K> interval) {
        return interval.contains(key);
    }

    @Override
    public void searchContainedBy(Interval<K> range, Collection<V> accumulator) {
        if (containedBy(range)) {
            accumulator.add(getValue());
        }
    }

    @Override
    public Interval<K> getRange() {
        return key;
    }

    @Override
    public int maxHeight() {
        return 1;
    }

    @Override
    public IntervalTreeNode<K, V> removeAll(Collection<V> values) {
        if (values.contains(getValue())) {
            return null;
        } else {
            return this;
        }
    }

    /** A leaf contributes one level beyond the height accumulated so far. */
    @Override
    public void averageHeight(Collection<Integer> heights, int currentHeight) {
        heights.add(currentHeight + 1);
    }

    @Override
    public IntervalTreeNode<K, V> removeOverlapping(Interval<K> range) {
        if (key.overlaps(range)) {
            return null;
        }
        return this;
    }

    @Override
    public IntervalTreeNode<K, V> removeContaining(Interval<K> range) {
        if (key.contains(range)) {
            return null;
        }
        return this;
    }

    @Override
    public IntervalTreeNode<K, V> removeContainedBy(Interval<K> range) {
        if (range.contains(key)) {
            return null;
        }
        return this;
    }
}
/* ******************************************************************************* * Copyright (C) 1996-2011, International Business Machines Corporation and * * others. All Rights Reserved. * ******************************************************************************* */ package com.ibm.icu.dev.test.util; import java.io.PrintWriter; import java.io.StringWriter; import java.text.ParsePosition; import java.util.ArrayList; import java.util.Collection; import java.util.Comparator; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.TreeMap; import java.util.regex.Pattern; import com.ibm.icu.dev.test.util.CollectionUtilities.InverseMatcher; import com.ibm.icu.dev.test.util.CollectionUtilities.ObjectMatcher; import com.ibm.icu.impl.Utility; import com.ibm.icu.text.SymbolTable; import com.ibm.icu.text.UFormat; import com.ibm.icu.text.UTF16; import com.ibm.icu.text.UnicodeMatcher; import com.ibm.icu.text.UnicodeSet; import com.ibm.icu.text.UnicodeSetIterator; public abstract class UnicodeProperty extends UnicodeLabel { public static final UnicodeSet UNASSIGNED = new UnicodeSet("[:gc=unassigned:]").freeze(); public static final UnicodeSet PRIVATE_USE = new UnicodeSet("[:gc=privateuse:]").freeze(); public static final UnicodeSet SURROGATE = new UnicodeSet("[:gc=surrogate:]").freeze(); public static final UnicodeSet SPECIALS = new UnicodeSet(UNASSIGNED).addAll(PRIVATE_USE).addAll(SURROGATE).freeze(); public static final int SAMPLE_UNASSIGNED = UNASSIGNED.charAt(0); public static final int SAMPLE_PRIVATE_USE = 0xE000; public static final int SAMPLE_SURROGATE = 0xD800; public static final UnicodeSet STUFF_TO_TEST = new UnicodeSet(SPECIALS).complement() .add(SAMPLE_UNASSIGNED).add(SAMPLE_PRIVATE_USE).add(SAMPLE_SURROGATE).freeze(); public static final UnicodeSet STUFF_TO_TEST_WITH_UNASSIGNED = new UnicodeSet("[:any:]").freeze(); public static boolean DEBUG = false; public static String CHECK_NAME = 
"FC_NFKC_Closure"; public static int CHECK_VALUE = 0x037A; private String name; private String firstNameAlias = null; private int type; private Map valueToFirstValueAlias = null; private boolean hasUniformUnassigned = false; /* * Name: Unicode_1_Name Name: ISO_Comment Name: Name Name: Unicode_1_Name * */ public static final int UNKNOWN = 0, BINARY = 2, EXTENDED_BINARY = 3, ENUMERATED = 4, EXTENDED_ENUMERATED = 5, CATALOG = 6, EXTENDED_CATALOG = 7, MISC = 8, EXTENDED_MISC = 9, STRING = 10, EXTENDED_STRING = 11, NUMERIC = 12, EXTENDED_NUMERIC = 13, START_TYPE = 2, LIMIT_TYPE = 14, EXTENDED_MASK = 1, CORE_MASK = ~EXTENDED_MASK, BINARY_MASK = (1 << BINARY) | (1 << EXTENDED_BINARY), STRING_MASK = (1 << STRING) | (1 << EXTENDED_STRING), STRING_OR_MISC_MASK = (1 << STRING) | (1 << EXTENDED_STRING) | (1 << MISC) | (1 << EXTENDED_MISC), ENUMERATED_OR_CATALOG_MASK = (1 << ENUMERATED) | (1 << EXTENDED_ENUMERATED) | (1 << CATALOG) | (1 << EXTENDED_CATALOG); private static final String[] TYPE_NAMES = { "Unknown", "Unknown", "Binary", "Extended Binary", "Enumerated", "Extended Enumerated", "Catalog", "Extended Catalog", "Miscellaneous", "Extended Miscellaneous", "String", "Extended String", "Numeric", "Extended Numeric", }; public static String getTypeName(int propType) { return TYPE_NAMES[propType]; } public final String getName() { return name; } public final int getType() { return type; } public String getTypeName() { return TYPE_NAMES[type]; } public final boolean isType(int mask) { return ((1 << type) & mask) != 0; } protected final void setName(String string) { if (string == null) throw new IllegalArgumentException("Name must not be null"); name = string; } protected final void setType(int i) { type = i; } public String getVersion() { return _getVersion(); } public String getValue(int codepoint) { if (DEBUG && CHECK_VALUE == codepoint && CHECK_NAME.equals(getName())) { String value = _getValue(codepoint); System.out.println(getName() + "(" + Utility.hex(codepoint) + "):" + 
(getType() == STRING ? Utility.hex(value) : value)); return value; } return _getValue(codepoint); } // public String getValue(int codepoint, boolean isShort) { // return getValue(codepoint); // } public List getNameAliases(List result) { if (result == null) result = new ArrayList(1); return _getNameAliases(result); } public List getValueAliases(String valueAlias, List result) { if (result == null) result = new ArrayList(1); result = _getValueAliases(valueAlias, result); if (!result.contains(valueAlias)) { // FIX && type < NUMERIC result = _getValueAliases(valueAlias, result); // for debugging throw new IllegalArgumentException("Internal error: " + getName() + " doesn't contain " + valueAlias + ": " + new BagFormatter().join(result)); } return result; } public List getAvailableValues(List result) { if (result == null) result = new ArrayList(1); return _getAvailableValues(result); } protected abstract String _getVersion(); protected abstract String _getValue(int codepoint); protected abstract List _getNameAliases(List result); protected abstract List _getValueAliases(String valueAlias, List result); protected abstract List _getAvailableValues(List result); // conveniences public final List getNameAliases() { return getNameAliases(null); } public final List getValueAliases(String valueAlias) { return getValueAliases(valueAlias, null); } public final List getAvailableValues() { return getAvailableValues(null); } public final String getValue(int codepoint, boolean getShortest) { String result = getValue(codepoint); if (type >= MISC || result == null || !getShortest) return result; return getFirstValueAlias(result); } public final String getFirstNameAlias() { if (firstNameAlias == null) { firstNameAlias = (String) getNameAliases().get(0); } return firstNameAlias; } public final String getFirstValueAlias(String value) { if (valueToFirstValueAlias == null) _getFirstValueAliasCache(); return valueToFirstValueAlias.get(value).toString(); } private void 
_getFirstValueAliasCache() { maxValueWidth = 0; maxFirstValueAliasWidth = 0; valueToFirstValueAlias = new HashMap(1); Iterator it = getAvailableValues().iterator(); while (it.hasNext()) { String value = (String) it.next(); String first = (String) getValueAliases(value).get(0); if (first == null) { // internal error throw new IllegalArgumentException( "Value not in value aliases: " + value); } if (DEBUG && CHECK_NAME.equals(getName())) { System.out.println("First Alias: " + getName() + ": " + value + " => " + first + new BagFormatter().join(getValueAliases(value))); } valueToFirstValueAlias.put(value, first); if (value.length() > maxValueWidth) { maxValueWidth = value.length(); } if (first.length() > maxFirstValueAliasWidth) { maxFirstValueAliasWidth = first.length(); } } } private int maxValueWidth = -1; private int maxFirstValueAliasWidth = -1; public int getMaxWidth(boolean getShortest) { if (maxValueWidth < 0) _getFirstValueAliasCache(); if (getShortest) return maxFirstValueAliasWidth; return maxValueWidth; } public final UnicodeSet getSet(String propertyValue) { return getSet(propertyValue, null); } public final UnicodeSet getSet(PatternMatcher matcher) { return getSet(matcher, null); } public final UnicodeSet getSet(String propertyValue, UnicodeSet result) { return getSet(new SimpleMatcher(propertyValue, isType(STRING_OR_MISC_MASK) ? 
null : PROPERTY_COMPARATOR), result); } private UnicodeMap unicodeMap = null; public static final String UNUSED = "??"; public final UnicodeSet getSet(PatternMatcher matcher, UnicodeSet result) { if (result == null) result = new UnicodeSet(); boolean uniformUnassigned = hasUniformUnassigned(); if (isType(STRING_OR_MISC_MASK)) { for (UnicodeSetIterator usi = getStuffToTest(uniformUnassigned); usi.next();) { // int i = 0; i <= 0x10FFFF; ++i int i = usi.codepoint; String value = getValue(i); if (value != null && matcher.matches(value)) { result.add(i); } } return addUntested(result, uniformUnassigned); } List temp = new ArrayList(1); // to avoid reallocating... UnicodeMap um = getUnicodeMap_internal(); Iterator it = um.getAvailableValues(null).iterator(); main: while (it.hasNext()) { String value = (String) it.next(); temp.clear(); Iterator it2 = getValueAliases(value, temp).iterator(); while (it2.hasNext()) { String value2 = (String) it2.next(); // System.out.println("Values:" + value2); if (matcher.matches(value2) || matcher.matches(toSkeleton(value2))) { um.keySet(value, result); continue main; } } } return result; } /* * public UnicodeSet getMatchSet(UnicodeSet result) { if (result == null) * result = new UnicodeSet(); addAll(matchIterator, result); return result; } * * public void setMatchSet(UnicodeSet set) { matchIterator = new * UnicodeSetIterator(set); } */ /** * Utility for debugging */ public static String getStack() { Exception e = new Exception(); StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw); e.printStackTrace(pw); pw.flush(); return "Showing Stack with fake " + sw.getBuffer().toString(); } // TODO use this instead of plain strings public static class Name implements Comparable { private String skeleton; private String pretty; public final int RAW = 0, TITLE = 1, NORMAL = 2; public Name(String name, int style) { if (name == null) name = ""; if (style == RAW) { skeleton = pretty = name; } else { pretty = regularize(name, style 
== TITLE); skeleton = toSkeleton(pretty); } } public int compareTo(Object o) { return skeleton.compareTo(((Name) o).skeleton); } public boolean equals(Object o) { return skeleton.equals(((Name) o).skeleton); } public int hashCode() { return skeleton.hashCode(); } public String toString() { return pretty; } } /** * @return the unicode map */ public UnicodeMap getUnicodeMap() { return getUnicodeMap(false); } /** * @return the unicode map */ public UnicodeMap getUnicodeMap(boolean getShortest) { if (!getShortest) return (UnicodeMap) getUnicodeMap_internal().cloneAsThawed(); UnicodeMap result = new UnicodeMap(); boolean uniformUnassigned = hasUniformUnassigned(); for (UnicodeSetIterator usi = getStuffToTest(uniformUnassigned); usi.next();) { // int i = 0; i <= 0x10FFFF; ++i int i = usi.codepoint; // if (DEBUG && i == 0x41) System.out.println(i + "\t" + // getValue(i)); String value = getValue(i, true); result.put(i, value); } return addUntested(result, uniformUnassigned); } /** * @return the unicode map */ protected UnicodeMap getUnicodeMap_internal() { if (unicodeMap == null) unicodeMap = _getUnicodeMap(); return unicodeMap; } protected UnicodeMap _getUnicodeMap() { UnicodeMap result = new UnicodeMap(); HashMap myIntern = new HashMap(); boolean uniformUnassigned = hasUniformUnassigned(); for (UnicodeSetIterator usi = getStuffToTest(uniformUnassigned); usi.next();) { // int i = 0; i <= 0x10FFFF; ++i int i = usi.codepoint; // if (DEBUG && i == 0x41) System.out.println(i + "\t" + // getValue(i)); String value = getValue(i); String iValue = (String) myIntern.get(value); if (iValue == null) myIntern.put(value, iValue = value); result.put(i, iValue); } addUntested(result, uniformUnassigned); if (DEBUG) { for (UnicodeSetIterator usi = getStuffToTest(uniformUnassigned); usi.next();) { // int i = 0; i <= 0x10FFFF; ++i int i = usi.codepoint; // if (DEBUG && i == 0x41) System.out.println(i + "\t" + // getValue(i)); String value = getValue(i); String resultValue = (String) 
result.getValue(i); if (!value.equals(resultValue)) { throw new RuntimeException("Value failure at: " + Utility.hex(i)); } } } if (DEBUG && CHECK_NAME.equals(getName())) { System.out.println(getName() + ":\t" + getClass().getName() + "\t" + getVersion()); System.out.println(getStack()); System.out.println(result); } return result; } private static UnicodeSetIterator getStuffToTest(boolean uniformUnassigned) { return new UnicodeSetIterator(uniformUnassigned ? STUFF_TO_TEST : STUFF_TO_TEST_WITH_UNASSIGNED); } /** * Really ought to create a Collection UniqueList, that forces uniqueness. * But for now... */ public static Collection addUnique(Object obj, Collection result) { if (obj != null && !result.contains(obj)) result.add(obj); return result; } /** * Utility for managing property & non-string value aliases */ public static final Comparator PROPERTY_COMPARATOR = new Comparator() { public int compare(Object o1, Object o2) { return compareNames((String) o1, (String) o2); } }; /** * Utility for managing property & non-string value aliases * */ // TODO optimize public static boolean equalNames(String a, String b) { if (a == b) return true; if (a == null) return false; return toSkeleton(a).equals(toSkeleton(b)); } /** * Utility for managing property & non-string value aliases */ // TODO optimize public static int compareNames(String a, String b) { if (a == b) return 0; if (a == null) return -1; if (b == null) return 1; return toSkeleton(a).compareTo(toSkeleton(b)); } /** * Utility for managing property & non-string value aliases */ // TODO account for special names, tibetan, hangul public static String toSkeleton(String source) { if (source == null) return null; StringBuffer skeletonBuffer = new StringBuffer(); boolean gotOne = false; // remove spaces, '_', '-' // we can do this with char, since no surrogates are involved for (int i = 0; i < source.length(); ++i) { char ch = source.charAt(i); if (i > 0 && (ch == '_' || ch == ' ' || ch == '-')) { gotOne = true; } else { 
char ch2 = Character.toLowerCase(ch); if (ch2 != ch) { gotOne = true; skeletonBuffer.append(ch2); } else { skeletonBuffer.append(ch); } } } if (!gotOne) return source; // avoid string creation return skeletonBuffer.toString(); } // get the name skeleton public static String toNameSkeleton(String source) { if (source == null) return null; StringBuffer result = new StringBuffer(); // remove spaces, medial '-' // we can do this with char, since no surrogates are involved for (int i = 0; i < source.length(); ++i) { char ch = source.charAt(i); if (('0' <= ch && ch <= '9') || ('A' <= ch && ch <= 'Z') || ch == '<' || ch == '>') { result.append(ch); } else if (ch == ' ') { // don't copy ever } else if (ch == '-') { // only copy non-medials AND trailing O-E if (0 == i || i == source.length() - 1 || source.charAt(i - 1) == ' ' || source.charAt(i + 1) == ' ' || (i == source.length() - 2 && source.charAt(i - 1) == 'O' && source .charAt(i + 1) == 'E')) { System.out.println("****** EXCEPTION " + source); result.append(ch); } // otherwise don't copy } else { throw new IllegalArgumentException("Illegal Name Char: U+" + Utility.hex(ch) + ", " + ch); } } return result.toString(); } /** * These routines use the Java functions, because they only need to act on * ASCII Changes space, - into _, inserts _ between lower and UPPER. 
*/
    /**
     * Regularizes a name for display: space, '-' and '_' become '_', and '_'
     * is inserted between a lower-case letter and a following UPPER-case
     * letter. If titlecaseStart, the first cased letter after each '_'/'='
     * boundary is upper-cased.
     */
    public static String regularize(String source, boolean titlecaseStart) {
        if (source == null)
            return source;
        /*
         * if (source.equals("noBreak")) { // HACK if (titlecaseStart) return
         * "NoBreak"; return source; }
         */
        StringBuffer result = new StringBuffer();
        int lastCat = -1;
        boolean haveFirstCased = true;
        for (int i = 0; i < source.length(); ++i) {
            char c = source.charAt(i);
            if (c == ' ' || c == '-' || c == '_') {
                c = '_';
                haveFirstCased = true;
            }
            if (c == '=')
                haveFirstCased = true;
            int cat = Character.getType(c);
            // camelCase boundary: insert '_' between lower and UPPER.
            if (lastCat == Character.LOWERCASE_LETTER
                    && cat == Character.UPPERCASE_LETTER) {
                result.append('_');
            }
            if (haveFirstCased
                    && (cat == Character.LOWERCASE_LETTER
                            || cat == Character.TITLECASE_LETTER
                            || cat == Character.UPPERCASE_LETTER)) {
                if (titlecaseStart) {
                    c = Character.toUpperCase(c);
                }
                haveFirstCased = false;
            }
            result.append(c);
            lastCat = cat;
        }
        return result.toString();
    }

    /**
     * Utility function for comparing codepoint to string without generating new
     * string.
     *
     * @param codepoint
     * @param other
     * @return true if the codepoint equals the string
     */
    public static final boolean equals(int codepoint, String other) {
        if (other == null)
            return false;
        if (other.length() == 1) {
            return codepoint == other.charAt(0);
        }
        if (other.length() == 2) {
            // supplementary codepoint vs surrogate pair
            return other.equals(UTF16.valueOf(codepoint));
        }
        return false;
    }

    /**
     * Utility function for comparing objects that may be null
     * string.
     */
    public static final <T extends Object> boolean equals(T a, T b) {
        return a == null ? b == null : b == null ? false : a.equals(b);
    }

    /**
     * Utility that should be on UnicodeSet: copies all ranges and strings from
     * the iterator into result.
     *
     * @param source
     * @param result
     */
    static public void addAll(UnicodeSetIterator source, UnicodeSet result) {
        while (source.nextRange()) {
            if (source.codepoint == UnicodeSetIterator.IS_STRING) {
                result.add(source.string);
            } else {
                result.add(source.codepoint, source.codepointEnd);
            }
        }
    }

    /**
     * Really ought to create a Collection UniqueList, that forces uniqueness.
     * But for now...
*/
    public static Collection addAllUnique(Collection source, Collection result) {
        for (Iterator it = source.iterator(); it.hasNext();) {
            addUnique(it.next(), result);
        }
        return result;
    }

    /**
     * Really ought to create a Collection UniqueList, that forces uniqueness.
     * But for now...
     */
    public static Collection addAllUnique(Object[] source, Collection result) {
        for (int i = 0; i < source.length; ++i) {
            addUnique(source[i], result);
        }
        return result;
    }

    /**
     * Registry of UnicodeProperty instances, addressable by any name alias
     * (looked up via its loose-matching skeleton).
     */
    static public class Factory {
        static boolean DEBUG = false;

        // canonical name -> property
        Map canonicalNames = new TreeMap();

        // skeleton of every alias -> property
        Map skeletonNames = new TreeMap();

        Map propertyCache = new HashMap(1);

        // Registers a property under its canonical name and all alias skeletons.
        public final Factory add(UnicodeProperty sp) {
            canonicalNames.put(sp.getName(), sp);
            List c = sp.getNameAliases(new ArrayList(1));
            Iterator it = c.iterator();
            while (it.hasNext()) {
                skeletonNames.put(toSkeleton((String) it.next()), sp);
            }
            return this;
        }

        public final UnicodeProperty getProperty(String propertyAlias) {
            return (UnicodeProperty) skeletonNames
                    .get(toSkeleton(propertyAlias));
        }

        public final List getAvailableNames() {
            return getAvailableNames(null);
        }

        public final List getAvailableNames(List result) {
            if (result == null)
                result = new ArrayList(1);
            Iterator it = canonicalNames.keySet().iterator();
            while (it.hasNext()) {
                addUnique(it.next(), result);
            }
            return result;
        }

        public final List getAvailableNames(int propertyTypeMask) {
            return getAvailableNames(propertyTypeMask, null);
        }

        // Returns only the names whose property type is selected by the mask.
        public final List getAvailableNames(int propertyTypeMask, List result) {
            if (result == null)
                result = new ArrayList(1);
            Iterator it = canonicalNames.keySet().iterator();
            while (it.hasNext()) {
                String item = (String) it.next();
                UnicodeProperty property = getProperty(item);
                if (DEBUG)
                    System.out.println("Properties: " + item + ","
                            + property.getType());
                if (!property.isType(propertyTypeMask)) {
                    // System.out.println("Masking: " + property.getType() + ","
                    // + propertyTypeMask);
                    continue;
                }
                addUnique(property.getName(), result);
            }
            return result;
        }

        InversePatternMatcher inverseMatcher = new InversePatternMatcher();

        /**
         * Format is: propname ('=' | '!=') propvalue ( '|' propValue )*
         */
        public final UnicodeSet getSet(String propAndValue,
                PatternMatcher matcher, UnicodeSet result) {
            int equalPos = propAndValue.indexOf('=');
            String prop = propAndValue.substring(0, equalPos);
            String value = propAndValue.substring(equalPos + 1);
            boolean negative = false;
            if (prop.endsWith("!")) {
                // "prop!=value" — negated match
                prop = prop.substring(0, prop.length() - 1);
                negative = true;
            }
            prop = prop.trim();
            UnicodeProperty up = getProperty(prop);
            if (matcher == null) {
                matcher = new SimpleMatcher(value, up
                        .isType(STRING_OR_MISC_MASK) ? null
                        : PROPERTY_COMPARATOR);
            }
            if (negative) {
                inverseMatcher.set(matcher);
                matcher = inverseMatcher;
            }
            return up.getSet(matcher.set(value), result);
        }

        public final UnicodeSet getSet(String propAndValue,
                PatternMatcher matcher) {
            return getSet(propAndValue, matcher, null);
        }

        public final UnicodeSet getSet(String propAndValue) {
            return getSet(propAndValue, null, null);
        }

        public final SymbolTable getSymbolTable(String prefix) {
            return new PropertySymbolTable(prefix);
        }

        private class MyXSymbolTable extends UnicodeSet.XSymbolTable {
            public boolean applyPropertyAlias(String propertyName,
                    String propertyValue, UnicodeSet result) {
                if (false)
                    System.out.println(propertyName + "=" + propertyValue);
                UnicodeProperty prop = getProperty(propertyName);
                if (prop == null)
                    return false;
                result.clear();
                UnicodeSet x = prop.getSet(propertyValue, result);
                return x.size() != 0;
            }
        }

        public final UnicodeSet.XSymbolTable getXSymbolTable() {
            return new MyXSymbolTable();
        }

        // SymbolTable resolving "prefix prop : value" references to set patterns.
        private class PropertySymbolTable implements SymbolTable {
            static final boolean DEBUG = false;

            private String prefix;

            RegexMatcher regexMatcher = new RegexMatcher();

            PropertySymbolTable(String prefix) {
                this.prefix = prefix;
            }

            public char[] lookup(String s) {
                if (DEBUG)
                    System.out.println("\t(" + prefix + ")Looking up " + s);
                // ensure, again, that prefix matches
                int start = prefix.length();
                if (!s.regionMatches(true, 0, prefix, 0, start))
                    return null;
                int pos = s.indexOf(':', start);
                if (pos < 0) { // should never happen
                    // NOTE(review): message says "missing =:" but the code
                    // searches for ':' — confirm intended wording.
                    throw new IllegalArgumentException(
                            "Internal Error: missing =: " + s + "\r\n");
                }
                UnicodeProperty prop = getProperty(s.substring(start, pos));
                if (prop == null) {
                    throw new IllegalArgumentException("Invalid Property in: "
                            + s + "\r\nUse " + showSet(getAvailableNames()));
                }
                String value = s.substring(pos + 1);
                UnicodeSet set;
                if (value.startsWith("\u00AB")) { // regex!
                    set = prop.getSet(regexMatcher.set(value.substring(1, value
                            .length() - 1)));
                } else {
                    set = prop.getSet(value);
                }
                if (set.size() == 0) {
                    throw new IllegalArgumentException(
                            "Empty Property-Value in: " + s + "\r\nUse "
                                    + showSet(prop.getAvailableValues()));
                }
                if (DEBUG)
                    System.out.println("\t(" + prefix + ")Returning "
                            + set.toPattern(true));
                return set.toPattern(true).toCharArray(); // really ugly
            }

            // Renders a list as "[a, b, c]" for error messages.
            private String showSet(List list) {
                StringBuffer result = new StringBuffer("[");
                boolean first = true;
                for (Iterator it = list.iterator(); it.hasNext();) {
                    if (!first)
                        result.append(", ");
                    else
                        first = false;
                    result.append(it.next().toString());
                }
                result.append("]");
                return result.toString();
            }

            public UnicodeMatcher lookupMatcher(int ch) {
                return null;
            }

            public String parseReference(String text, ParsePosition pos,
                    int limit) {
                if (DEBUG)
                    System.out.println("\t(" + prefix + ")Parsing <"
                            + text.substring(pos.getIndex(), limit) + ">");
                int start = pos.getIndex();
                // ensure that it starts with 'prefix'
                if (!text
                        .regionMatches(true, start, prefix, 0, prefix.length()))
                    return null;
                start += prefix.length();
                // now see if it is of the form identifier:identifier
                int i = getIdentifier(text, start, limit);
                if (i == start)
                    return null;
                String prop = text.substring(start, i);
                String value = "true";
                if (i < limit) {
                    if (text.charAt(i) == ':') {
                        int j;
                        if (text.charAt(i + 1) == '\u00AB') {
                            // regular expression value: scan to the closing
                            // guillemet, include last character
                            j = text.indexOf('\u00BB', i + 2) + 1;
                            if (j <= 0)
                                return null;
                        } else {
                            j = getIdentifier(text, i + 1, limit);
                        }
                        value = text.substring(i + 1, j);
                        i = j;
                    }
                }
                pos.setIndex(i);
                if (DEBUG)
                    System.out.println("\t(" + prefix + ")Parsed <" + prop
                            + ">=<" + value + ">");
                return prefix + prop + ":" + value;
            }

            // Scans an identifier (Unicode identifier parts plus '.').
            private int getIdentifier(String text, int start, int limit) {
                if (DEBUG)
                    System.out.println("\tGetID <"
                            + text.substring(start, limit) + ">");
                int cp = 0;
                int i;
                for (i = start; i < limit; i += UTF16.getCharCount(cp)) {
                    cp = UTF16.charAt(text, i);
                    if (!com.ibm.icu.lang.UCharacter
                            .isUnicodeIdentifierPart(cp)
                            && cp != '.') {
                        break;
                    }
                }
                if (DEBUG)
                    System.out.println("\tGotID <" + text.substring(start, i)
                            + ">");
                return i;
            }
        }
    }

    /**
     * A view of another property whose value strings are rewritten by a
     * StringFilter.
     */
    public static class FilteredProperty extends UnicodeProperty {
        private UnicodeProperty property;

        protected StringFilter filter;

        protected UnicodeSetIterator matchIterator = new UnicodeSetIterator(
                new UnicodeSet(0, 0x10FFFF));

        // lazily built reverse map: filtered value -> original value
        protected HashMap backmap;

        boolean allowValueAliasCollisions = false;

        public FilteredProperty(UnicodeProperty property, StringFilter filter) {
            this.property = property;
            this.filter = filter;
        }

        public StringFilter getFilter() {
            return filter;
        }

        public UnicodeProperty setFilter(StringFilter filter) {
            this.filter = filter;
            return this;
        }

        List temp = new ArrayList(1);

        public List _getAvailableValues(List result) {
            temp.clear();
            return filter.addUnique(property.getAvailableValues(temp), result);
        }

        public List _getNameAliases(List result) {
            temp.clear();
            return filter.addUnique(property.getNameAliases(temp), result);
        }

        public String _getValue(int codepoint) {
            return filter.remap(property.getValue(codepoint));
        }

        public List _getValueAliases(String valueAlias, List result) {
            if (backmap == null) {
                // Build the reverse map, rejecting filters that merge distinct
                // values unless collisions were explicitly allowed.
                backmap = new HashMap(1);
                temp.clear();
                Iterator it = property.getAvailableValues(temp).iterator();
                while (it.hasNext()) {
                    String item = (String) it.next();
                    String mappedItem = filter.remap(item);
                    if (backmap.get(mappedItem) != null
                            && !allowValueAliasCollisions) {
                        throw new IllegalArgumentException(
                                "Filter makes values collide! " + item + ", "
                                        + mappedItem);
                    }
                    backmap.put(mappedItem, item);
                }
            }
            valueAlias = (String) backmap.get(valueAlias);
            temp.clear();
            return filter.addUnique(property.getValueAliases(valueAlias, temp),
                    result);
        }

        public String _getVersion() {
            return property.getVersion();
        }

        public boolean isAllowValueAliasCollisions() {
            return allowValueAliasCollisions;
        }

        public FilteredProperty setAllowValueAliasCollisions(boolean b) {
            allowValueAliasCollisions = b;
            return this;
        }
    }

    /**
     * Rewrites value strings; used by FilteredProperty.
     */
    public static abstract class StringFilter implements Cloneable {
        public abstract String remap(String original);

        // Remaps each element of source and adds it to result if not present.
        public final List addUnique(Collection source, List result) {
            if (result == null)
                result = new ArrayList(1);
            Iterator it = source.iterator();
            while (it.hasNext()) {
                UnicodeProperty.addUnique(remap((String) it.next()), result);
            }
            return result;
        }
        /*
         * public Object clone() { try { return super.clone(); } catch
         * (CloneNotSupportedException e) { throw new
         * IllegalStateException("Should never happen."); } }
         */
    }

    /**
     * StringFilter backed by a Map; unmapped values pass through unchanged.
     */
    public static class MapFilter extends StringFilter {
        private Map valueMap;

        public MapFilter(Map valueMap) {
            this.valueMap = valueMap;
        }

        public String remap(String original) {
            Object changed = valueMap.get(original);
            return changed == null ? original : (String) changed;
        }

        public Map getMap() {
            return valueMap;
        }
    }

    public interface PatternMatcher extends ObjectMatcher {
        public PatternMatcher set(String pattern);
    }

    public static class InversePatternMatcher extends InverseMatcher implements
            PatternMatcher {
        PatternMatcher other;

        public PatternMatcher set(PatternMatcher toInverse) {
            other = toInverse;
            return this;
        }

        public boolean matches(Object value) {
            return !other.matches(value);
        }

        public PatternMatcher set(String pattern) {
            other.set(pattern);
            return this;
        }
    }

    // Matches by equality, or via the supplied comparator when one is given.
    public static class SimpleMatcher implements PatternMatcher {
        Comparator comparator;

        String pattern;

        public SimpleMatcher(String pattern, Comparator comparator) {
            this.comparator = comparator;
            this.pattern = pattern;
        }

        public boolean matches(Object value) {
            if (comparator == null)
                return pattern.equals(value);
            return comparator.compare(pattern, value) == 0;
        }

        public PatternMatcher set(String pattern) {
            this.pattern = pattern;
            return this;
        }
    }

    // Matches values against a java.util.regex pattern (find, not full match).
    public static class RegexMatcher implements UnicodeProperty.PatternMatcher {
        private java.util.regex.Matcher matcher;

        public UnicodeProperty.PatternMatcher set(String pattern) {
            matcher = Pattern.compile(pattern).matcher("");
            return this;
        }

        UFormat foo; // NOTE(review): unused field — looks like leftover code; confirm before removing

        public boolean matches(Object value) {
            matcher.reset(value.toString());
            return matcher.find();
        }
    }

    /**
     * Base implementation holding name aliases, value aliases, and version.
     */
    public static abstract class BaseProperty extends UnicodeProperty {
        private static final String[] NO_VALUES = {"No", "N", "F", "False"};

        private static final String[] YES_VALUES = {"Yes", "Y", "T", "True"};

        /**
         * Standard alias rows wired in for binary properties.
         */
        private static final String[][] YES_NO_ALIASES = new String[][] {YES_VALUES, NO_VALUES};

        protected List propertyAliases = new ArrayList(1);

        protected Map toValueAliases;

        protected String version;

        // Sets name, type, aliases and version; wires Yes/No aliases for BINARY.
        public BaseProperty setMain(String alias, String shortAlias,
                int propertyType, String version) {
            setName(alias);
            setType(propertyType);
            propertyAliases.add(shortAlias);
            propertyAliases.add(alias);
            if (propertyType == BINARY) {
addValueAliases(YES_NO_ALIASES, false); } this.version = version; return this; } public String _getVersion() { return version; } public List _getNameAliases(List result) { addAllUnique(propertyAliases, result); return result; } public BaseProperty addValueAliases(String[][] valueAndAlternates, boolean errorIfCant) { if (toValueAliases == null) _fixValueAliases(); for (int i = 0; i < valueAndAlternates.length; ++i) { for (int j = 1; j < valueAndAlternates[0].length; ++j) { addValueAlias(valueAndAlternates[i][0], valueAndAlternates[i][j], errorIfCant); } } return this; } public void addValueAlias(String value, String valueAlias, boolean errorIfCant) { List result = (List) toValueAliases.get(value); if (result == null && !errorIfCant) return; addUnique(value, result); addUnique(valueAlias, result); } protected List _getValueAliases(String valueAlias, List result) { if (toValueAliases == null) _fixValueAliases(); List a = (List) toValueAliases.get(valueAlias); if (a != null) addAllUnique(a, result); return result; } protected void _fixValueAliases() { if (toValueAliases == null) toValueAliases = new HashMap(1); for (Iterator it = getAvailableValues().iterator(); it.hasNext();) { Object value = it.next(); _ensureValueInAliases(value); } } protected void _ensureValueInAliases(Object value) { List result = (List) toValueAliases.get(value); if (result == null) toValueAliases.put(value, result = new ArrayList(1)); addUnique(value, result); } public BaseProperty swapFirst2ValueAliases() { for (Iterator it = toValueAliases.keySet().iterator(); it.hasNext();) { List list = (List) toValueAliases.get(it.next()); if (list.size() < 2) continue; Object first = list.get(0); list.set(0, list.get(1)); list.set(1, first); } return this; } /** * @param string * @return */ public UnicodeProperty addName(String string) { throw new UnsupportedOperationException(); } } public static abstract class SimpleProperty extends BaseProperty { List values; public UnicodeProperty addName(String 
alias) { propertyAliases.add(alias); return this; } public SimpleProperty setValues(String valueAlias) { _addToValues(valueAlias, null); return this; } public SimpleProperty addAliases(String valueAlias, String... aliases) { _addToValues(valueAlias, null); return this; } public SimpleProperty setValues(String[] valueAliases, String[] alternateValueAliases) { for (int i = 0; i < valueAliases.length; ++i) { if (valueAliases[i].equals(UNUSED)) continue; _addToValues( valueAliases[i], alternateValueAliases != null ? alternateValueAliases[i] : null); } return this; } public SimpleProperty setValues(List valueAliases) { this.values = new ArrayList(valueAliases); for (Iterator it = this.values.iterator(); it.hasNext();) { _addToValues((String) it.next(), null); } return this; } public List _getAvailableValues(List result) { if (values == null) _fillValues(); result.addAll(values); return result; } protected void _fillValues() { List newvalues = (List) getUnicodeMap_internal() .getAvailableValues(new ArrayList()); for (Iterator it = newvalues.iterator(); it.hasNext();) { _addToValues((String) it.next(), null); } } private void _addToValues(String item, String alias) { if (values == null) values = new ArrayList(1); if (toValueAliases == null) _fixValueAliases(); addUnique(item, values); _ensureValueInAliases(item); addValueAlias(item, alias, true); } /* public String _getVersion() { return version; } */ } public static class UnicodeMapProperty extends BaseProperty { /* * Example of usage: * new UnicodeProperty.UnicodeMapProperty() { { unicodeMap = new UnicodeMap(); unicodeMap.setErrorOnReset(true); unicodeMap.put(0xD, "CR"); unicodeMap.put(0xA, "LF"); UnicodeProperty cat = getProperty("General_Category"); UnicodeSet temp = cat.getSet("Line_Separator") .addAll(cat.getSet("Paragraph_Separator")) .addAll(cat.getSet("Control")) .addAll(cat.getSet("Format")) .remove(0xD).remove(0xA).remove(0x200C).remove(0x200D); unicodeMap.putAll(temp, "Control"); UnicodeSet graphemeExtend = 
getProperty("Grapheme_Extend").getSet("true");
         * unicodeMap.putAll(graphemeExtend,"Extend");
         * UnicodeProperty hangul = getProperty("Hangul_Syllable_Type");
         * unicodeMap.putAll(hangul.getSet("L"),"L");
         * unicodeMap.putAll(hangul.getSet("V"),"V");
         * unicodeMap.putAll(hangul.getSet("T"),"T");
         * unicodeMap.putAll(hangul.getSet("LV"),"LV");
         * unicodeMap.putAll(hangul.getSet("LVT"),"LVT");
         * unicodeMap.setMissing("Other");
         * }
         * }.setMain("Grapheme_Cluster_Break", "GCB",
         * UnicodeProperty.ENUMERATED, version)
         */
        protected UnicodeMap unicodeMap;

        public UnicodeMapProperty set(UnicodeMap map) {
            unicodeMap = map;
            return this;
        }

        protected String _getValue(int codepoint) {
            return (String) unicodeMap.getValue(codepoint);
        }

        /*
         * protected List _getValueAliases(String valueAlias, List result) { if
         * (!unicodeMap.getAvailableValues().contains(valueAlias)) return
         * result; result.add(valueAlias); return result; // no other aliases }
         */
        protected List _getAvailableValues(List result) {
            return (List) unicodeMap.getAvailableValues(result);
        }
    }

    /**
     * Returns true if propertyValue names (directly or via an alias) a valid
     * value of this property; string/miscellaneous properties accept anything.
     */
    public boolean isValidValue(String propertyValue) {
        if (isType(STRING_OR_MISC_MASK)) {
            return true;
        }
        Collection<String> values = (Collection<String>) getAvailableValues();
        for (String valueAlias : values) {
            if (UnicodeProperty.compareNames(valueAlias, propertyValue) == 0) {
                return true;
            }
            for (String valueAlias2 : (Collection<String>) getValueAliases(valueAlias)) {
                if (UnicodeProperty.compareNames(valueAlias2, propertyValue) == 0) {
                    return true;
                }
            }
        }
        return false;
    }

    // Returns all value aliases that are not themselves primary values.
    public List<String> getValueAliases() {
        List<String> result = new ArrayList();
        if (isType(STRING_OR_MISC_MASK)) {
            return result;
        }
        Collection<String> values = (Collection<String>) getAvailableValues();
        for (String valueAlias : values) {
            UnicodeProperty.addAllUnique(getValueAliases(valueAlias), result);
        }
        result.removeAll(values);
        return result;
    }

    // Expands sample codepoints to their whole untested ranges (set form).
    public static UnicodeSet addUntested(UnicodeSet result,
            boolean uniformUnassigned) {
        if (!uniformUnassigned)
            return result;
        if (result.contains(UnicodeProperty.SAMPLE_UNASSIGNED)) {
            result.addAll(UnicodeProperty.UNASSIGNED);
        }
        if (result.contains(UnicodeProperty.SAMPLE_PRIVATE_USE)) {
            result.addAll(UnicodeProperty.PRIVATE_USE);
        }
        if (result.contains(UnicodeProperty.SAMPLE_SURROGATE)) {
            result.addAll(UnicodeProperty.SURROGATE);
        }
        return result;
    }

    // Expands sample codepoints to their whole untested ranges (map form).
    public static UnicodeMap addUntested(UnicodeMap result,
            boolean uniformUnassigned) {
        if (!uniformUnassigned)
            return result;
        Object temp;
        if (null != (temp = result.get(UnicodeProperty.SAMPLE_UNASSIGNED))) {
            result.putAll(UnicodeProperty.UNASSIGNED, temp);
        }
        if (null != (temp = result.get(UnicodeProperty.SAMPLE_PRIVATE_USE))) {
            result.putAll(UnicodeProperty.PRIVATE_USE, temp);
        }
        if (null != (temp = result.get(UnicodeProperty.SAMPLE_SURROGATE))) {
            result.putAll(UnicodeProperty.SURROGATE, temp);
        }
        return result;
    }

    // True if cp carries the property's default value (itself for string props).
    public boolean isDefault(int cp) {
        String value = getValue(cp);
        if (isType(STRING_OR_MISC_MASK)) {
            return equals(cp, value);
        }
        String defaultValue = getValue(SAMPLE_UNASSIGNED);
        return defaultValue == null ? value == null : defaultValue.equals(value);
    }

    public boolean hasUniformUnassigned() {
        return hasUniformUnassigned;
    }

    protected UnicodeProperty setUniformUnassigned(boolean hasUniformUnassigned) {
        this.hasUniformUnassigned = hasUniformUnassigned;
        return this;
    }
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.compute.v2020_06_01.implementation;

import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.CloudException;
import com.microsoft.azure.management.compute.v2020_06_01.RequestRateByIntervalInput;
import com.microsoft.azure.management.compute.v2020_06_01.ThrottledRequestsInput;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.Validator;
import java.io.IOException;
import okhttp3.ResponseBody;
import retrofit2.http.Body;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.Path;
import retrofit2.http.POST;
import retrofit2.http.Query;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
import com.microsoft.azure.LongRunningFinalState;
import com.microsoft.azure.LongRunningOperationOptions;

/**
 * An instance of this class provides access to all the operations defined
 * in LogAnalytics.
 */
public class LogAnalyticsInner {
    /** The Retrofit service to perform REST calls. */
    private LogAnalyticsService service;
    /** The service client containing this operation class. */
    private ComputeManagementClientImpl client;

    /**
     * Initializes an instance of LogAnalyticsInner.
     *
     * @param retrofit the Retrofit instance built from a Retrofit Builder.
     * @param client the instance of the service client containing this operation class.
     */
    public LogAnalyticsInner(Retrofit retrofit, ComputeManagementClientImpl client) {
        this.service = retrofit.create(LogAnalyticsService.class);
        this.client = client;
    }

    /**
     * The interface defining all the services for LogAnalytics to be
     * used by Retrofit to perform actual REST calls.
*/
    interface LogAnalyticsService {
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.compute.v2020_06_01.LogAnalytics exportRequestRateByInterval" })
        @POST("subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/logAnalytics/apiAccess/getRequestRateByInterval")
        Observable<Response<ResponseBody>> exportRequestRateByInterval(@Path("location") String location, @Path("subscriptionId") String subscriptionId, @Body RequestRateByIntervalInput parameters, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.compute.v2020_06_01.LogAnalytics beginExportRequestRateByInterval" })
        @POST("subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/logAnalytics/apiAccess/getRequestRateByInterval")
        Observable<Response<ResponseBody>> beginExportRequestRateByInterval(@Path("location") String location, @Path("subscriptionId") String subscriptionId, @Body RequestRateByIntervalInput parameters, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.compute.v2020_06_01.LogAnalytics exportThrottledRequests" })
        @POST("subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/logAnalytics/apiAccess/getThrottledRequests")
        Observable<Response<ResponseBody>> exportThrottledRequests(@Path("location") String location, @Path("subscriptionId") String subscriptionId, @Body ThrottledRequestsInput parameters, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.compute.v2020_06_01.LogAnalytics beginExportThrottledRequests" })
        @POST("subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/logAnalytics/apiAccess/getThrottledRequests")
        Observable<Response<ResponseBody>> beginExportThrottledRequests(@Path("location") String location, @Path("subscriptionId") String subscriptionId, @Body ThrottledRequestsInput parameters, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    }

    /**
     * Export logs that show Api requests made by this subscription in the given time window to show throttling activities.
     *
     * @param location The location upon which virtual-machine-sizes is queried.
     * @param parameters Parameters supplied to the LogAnalytics getRequestRateByInterval Api.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the LogAnalyticsOperationResultInner object if successful.
     */
    public LogAnalyticsOperationResultInner exportRequestRateByInterval(String location, RequestRateByIntervalInput parameters) {
        // Blocks until the long-running operation finishes (last emission).
        return exportRequestRateByIntervalWithServiceResponseAsync(location, parameters).toBlocking().last().body();
    }

    /**
     * Export logs that show Api requests made by this subscription in the given time window to show throttling activities.
     *
     * @param location The location upon which virtual-machine-sizes is queried.
     * @param parameters Parameters supplied to the LogAnalytics getRequestRateByInterval Api.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<LogAnalyticsOperationResultInner> exportRequestRateByIntervalAsync(String location, RequestRateByIntervalInput parameters, final ServiceCallback<LogAnalyticsOperationResultInner> serviceCallback) {
        return ServiceFuture.fromResponse(exportRequestRateByIntervalWithServiceResponseAsync(location, parameters), serviceCallback);
    }

    /**
     * Export logs that show Api requests made by this subscription in the given time window to show throttling activities.
     *
     * @param location The location upon which virtual-machine-sizes is queried.
     * @param parameters Parameters supplied to the LogAnalytics getRequestRateByInterval Api.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<LogAnalyticsOperationResultInner> exportRequestRateByIntervalAsync(String location, RequestRateByIntervalInput parameters) {
        return exportRequestRateByIntervalWithServiceResponseAsync(location, parameters).map(new Func1<ServiceResponse<LogAnalyticsOperationResultInner>, LogAnalyticsOperationResultInner>() {
            @Override
            public LogAnalyticsOperationResultInner call(ServiceResponse<LogAnalyticsOperationResultInner> response) {
                return response.body();
            }
        });
    }

    /**
     * Export logs that show Api requests made by this subscription in the given time window to show throttling activities.
     *
     * @param location The location upon which virtual-machine-sizes is queried.
     * @param parameters Parameters supplied to the LogAnalytics getRequestRateByInterval Api.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<ServiceResponse<LogAnalyticsOperationResultInner>> exportRequestRateByIntervalWithServiceResponseAsync(String location, RequestRateByIntervalInput parameters) {
        if (location == null) {
            throw new IllegalArgumentException("Parameter location is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (parameters == null) {
            throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
        }
        Validator.validate(parameters);
        final String apiVersion = "2020-06-01";
        Observable<Response<ResponseBody>> observable = service.exportRequestRateByInterval(location, this.client.subscriptionId(), parameters, apiVersion, this.client.acceptLanguage(), this.client.userAgent());
        // Long-running operation: poll via the Azure-AsyncOperation header.
        return client.getAzureClient().getPostOrDeleteResultAsync(observable, new LongRunningOperationOptions().withFinalStateVia(LongRunningFinalState.AZURE_ASYNC_OPERATION), new TypeToken<LogAnalyticsOperationResultInner>() { }.getType());
    }

    /**
     * Export logs that show Api requests made by this subscription in the given time window to show throttling activities.
     *
     * @param location The location upon which virtual-machine-sizes is queried.
     * @param parameters Parameters supplied to the LogAnalytics getRequestRateByInterval Api.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the LogAnalyticsOperationResultInner object if successful.
     */
    public LogAnalyticsOperationResultInner beginExportRequestRateByInterval(String location, RequestRateByIntervalInput parameters) {
        // "begin" variant: issues the initial request only (single emission).
        return beginExportRequestRateByIntervalWithServiceResponseAsync(location, parameters).toBlocking().single().body();
    }

    /**
     * Export logs that show Api requests made by this subscription in the given time window to show throttling activities.
     *
     * @param location The location upon which virtual-machine-sizes is queried.
     * @param parameters Parameters supplied to the LogAnalytics getRequestRateByInterval Api.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<LogAnalyticsOperationResultInner> beginExportRequestRateByIntervalAsync(String location, RequestRateByIntervalInput parameters, final ServiceCallback<LogAnalyticsOperationResultInner> serviceCallback) {
        return ServiceFuture.fromResponse(beginExportRequestRateByIntervalWithServiceResponseAsync(location, parameters), serviceCallback);
    }

    /**
     * Export logs that show Api requests made by this subscription in the given time window to show throttling activities.
     *
     * @param location The location upon which virtual-machine-sizes is queried.
     * @param parameters Parameters supplied to the LogAnalytics getRequestRateByInterval Api.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the LogAnalyticsOperationResultInner object
     */
    public Observable<LogAnalyticsOperationResultInner> beginExportRequestRateByIntervalAsync(String location, RequestRateByIntervalInput parameters) {
        return beginExportRequestRateByIntervalWithServiceResponseAsync(location, parameters).map(new Func1<ServiceResponse<LogAnalyticsOperationResultInner>, LogAnalyticsOperationResultInner>() {
            @Override
            public LogAnalyticsOperationResultInner call(ServiceResponse<LogAnalyticsOperationResultInner> response) {
                return response.body();
            }
        });
    }

    /**
     * Export logs that show Api requests made by this subscription in the given time window to show throttling activities.
     *
     * @param location The location upon which virtual-machine-sizes is queried.
     * @param parameters Parameters supplied to the LogAnalytics getRequestRateByInterval Api.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the LogAnalyticsOperationResultInner object
     */
    public Observable<ServiceResponse<LogAnalyticsOperationResultInner>> beginExportRequestRateByIntervalWithServiceResponseAsync(String location, RequestRateByIntervalInput parameters) {
        if (location == null) {
            throw new IllegalArgumentException("Parameter location is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (parameters == null) {
            throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
        }
        Validator.validate(parameters);
        final String apiVersion = "2020-06-01";
        return service.beginExportRequestRateByInterval(location, this.client.subscriptionId(), parameters, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<LogAnalyticsOperationResultInner>>>() {
                @Override
                public Observable<ServiceResponse<LogAnalyticsOperationResultInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<LogAnalyticsOperationResultInner> clientResponse = beginExportRequestRateByIntervalDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes the raw response: 200 -> result body, 202 -> accepted (void).
    private ServiceResponse<LogAnalyticsOperationResultInner> beginExportRequestRateByIntervalDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<LogAnalyticsOperationResultInner, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<LogAnalyticsOperationResultInner>() { }.getType())
                .register(202, new TypeToken<Void>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }

    /**
     * Export logs that show total throttled Api requests for this subscription in the given time window.
     *
     * @param location The location upon which virtual-machine-sizes is queried.
     * @param parameters Parameters supplied to the LogAnalytics getThrottledRequests Api.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the LogAnalyticsOperationResultInner object if successful.
     */
    public LogAnalyticsOperationResultInner exportThrottledRequests(String location, ThrottledRequestsInput parameters) {
        return exportThrottledRequestsWithServiceResponseAsync(location, parameters).toBlocking().last().body();
    }

    /**
     * Export logs that show total throttled Api requests for this subscription in the given time window.
     *
     * @param location The location upon which virtual-machine-sizes is queried.
* @param parameters Parameters supplied to the LogAnalytics getThrottledRequests Api. * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<LogAnalyticsOperationResultInner> exportThrottledRequestsAsync(String location, ThrottledRequestsInput parameters, final ServiceCallback<LogAnalyticsOperationResultInner> serviceCallback) { return ServiceFuture.fromResponse(exportThrottledRequestsWithServiceResponseAsync(location, parameters), serviceCallback); } /** * Export logs that show total throttled Api requests for this subscription in the given time window. * * @param location The location upon which virtual-machine-sizes is queried. * @param parameters Parameters supplied to the LogAnalytics getThrottledRequests Api. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable for the request */ public Observable<LogAnalyticsOperationResultInner> exportThrottledRequestsAsync(String location, ThrottledRequestsInput parameters) { return exportThrottledRequestsWithServiceResponseAsync(location, parameters).map(new Func1<ServiceResponse<LogAnalyticsOperationResultInner>, LogAnalyticsOperationResultInner>() { @Override public LogAnalyticsOperationResultInner call(ServiceResponse<LogAnalyticsOperationResultInner> response) { return response.body(); } }); } /** * Export logs that show total throttled Api requests for this subscription in the given time window. * * @param location The location upon which virtual-machine-sizes is queried. * @param parameters Parameters supplied to the LogAnalytics getThrottledRequests Api. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable for the request */ public Observable<ServiceResponse<LogAnalyticsOperationResultInner>> exportThrottledRequestsWithServiceResponseAsync(String location, ThrottledRequestsInput parameters) { if (location == null) { throw new IllegalArgumentException("Parameter location is required and cannot be null."); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (parameters == null) { throw new IllegalArgumentException("Parameter parameters is required and cannot be null."); } Validator.validate(parameters); final String apiVersion = "2020-06-01"; Observable<Response<ResponseBody>> observable = service.exportThrottledRequests(location, this.client.subscriptionId(), parameters, apiVersion, this.client.acceptLanguage(), this.client.userAgent()); return client.getAzureClient().getPostOrDeleteResultAsync(observable, new LongRunningOperationOptions().withFinalStateVia(LongRunningFinalState.AZURE_ASYNC_OPERATION), new TypeToken<LogAnalyticsOperationResultInner>() { }.getType()); } /** * Export logs that show total throttled Api requests for this subscription in the given time window. * * @param location The location upon which virtual-machine-sizes is queried. * @param parameters Parameters supplied to the LogAnalytics getThrottledRequests Api. * @throws IllegalArgumentException thrown if parameters fail the validation * @throws CloudException thrown if the request is rejected by server * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @return the LogAnalyticsOperationResultInner object if successful. 
*/ public LogAnalyticsOperationResultInner beginExportThrottledRequests(String location, ThrottledRequestsInput parameters) { return beginExportThrottledRequestsWithServiceResponseAsync(location, parameters).toBlocking().single().body(); } /** * Export logs that show total throttled Api requests for this subscription in the given time window. * * @param location The location upon which virtual-machine-sizes is queried. * @param parameters Parameters supplied to the LogAnalytics getThrottledRequests Api. * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<LogAnalyticsOperationResultInner> beginExportThrottledRequestsAsync(String location, ThrottledRequestsInput parameters, final ServiceCallback<LogAnalyticsOperationResultInner> serviceCallback) { return ServiceFuture.fromResponse(beginExportThrottledRequestsWithServiceResponseAsync(location, parameters), serviceCallback); } /** * Export logs that show total throttled Api requests for this subscription in the given time window. * * @param location The location upon which virtual-machine-sizes is queried. * @param parameters Parameters supplied to the LogAnalytics getThrottledRequests Api. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the LogAnalyticsOperationResultInner object */ public Observable<LogAnalyticsOperationResultInner> beginExportThrottledRequestsAsync(String location, ThrottledRequestsInput parameters) { return beginExportThrottledRequestsWithServiceResponseAsync(location, parameters).map(new Func1<ServiceResponse<LogAnalyticsOperationResultInner>, LogAnalyticsOperationResultInner>() { @Override public LogAnalyticsOperationResultInner call(ServiceResponse<LogAnalyticsOperationResultInner> response) { return response.body(); } }); } /** * Export logs that show total throttled Api requests for this subscription in the given time window. * * @param location The location upon which virtual-machine-sizes is queried. * @param parameters Parameters supplied to the LogAnalytics getThrottledRequests Api. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the LogAnalyticsOperationResultInner object */ public Observable<ServiceResponse<LogAnalyticsOperationResultInner>> beginExportThrottledRequestsWithServiceResponseAsync(String location, ThrottledRequestsInput parameters) { if (location == null) { throw new IllegalArgumentException("Parameter location is required and cannot be null."); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (parameters == null) { throw new IllegalArgumentException("Parameter parameters is required and cannot be null."); } Validator.validate(parameters); final String apiVersion = "2020-06-01"; return service.beginExportThrottledRequests(location, this.client.subscriptionId(), parameters, apiVersion, this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<LogAnalyticsOperationResultInner>>>() { @Override public 
Observable<ServiceResponse<LogAnalyticsOperationResultInner>> call(Response<ResponseBody> response) { try { ServiceResponse<LogAnalyticsOperationResultInner> clientResponse = beginExportThrottledRequestsDelegate(response); return Observable.just(clientResponse); } catch (Throwable t) { return Observable.error(t); } } }); } private ServiceResponse<LogAnalyticsOperationResultInner> beginExportThrottledRequestsDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException { return this.client.restClient().responseBuilderFactory().<LogAnalyticsOperationResultInner, CloudException>newInstance(this.client.serializerAdapter()) .register(200, new TypeToken<LogAnalyticsOperationResultInner>() { }.getType()) .register(202, new TypeToken<Void>() { }.getType()) .registerError(CloudException.class) .build(response); } }
/* * Copyright (C) 2017 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.gapid.models; import static com.google.gapid.proto.service.memory.Memory.PoolNames.Application_VALUE; import static com.google.gapid.util.Paths.commandTree; import static com.google.gapid.util.Paths.lastCommand; import static com.google.gapid.util.Paths.observationsAfter; import static com.google.gapid.widgets.Widgets.submitIfNotDisposed; import static java.util.logging.Level.FINE; import com.google.common.base.Objects; import com.google.common.base.Preconditions; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.gapid.models.ApiContext.FilteringContext; import com.google.gapid.proto.service.Service; import com.google.gapid.proto.service.api.API; import com.google.gapid.proto.service.path.Path; import com.google.gapid.rpc.Rpc; import com.google.gapid.rpc.RpcException; import com.google.gapid.rpc.UiCallback; import com.google.gapid.server.Client; import com.google.gapid.util.Events; import com.google.gapid.util.Loadable; import com.google.gapid.util.Messages; import com.google.gapid.util.Paths; import com.google.gapid.util.Ranges; import org.eclipse.swt.widgets.Shell; import java.util.concurrent.ExecutionException; import java.util.function.Consumer; import java.util.function.Supplier; import java.util.logging.Logger; /** * Model containing the API commands (atoms) of the capture. 
*/
public class AtomStream extends ModelBase.ForPath<AtomStream.Node, Void, AtomStream.Listener>
    implements ApiContext.Listener, Capture.Listener {
  protected static final Logger LOG = Logger.getLogger(AtomStream.class.getName());

  private final Capture capture;
  private final ApiContext context;
  private final ConstantSets constants;
  // Currently selected command/tree-node; may exist with a null node while resolution
  // against the current command tree is still pending.
  private AtomIndex selection;

  public AtomStream(
      Shell shell, Client client, Capture capture, ApiContext context, ConstantSets constants) {
    super(LOG, shell, client, Listener.class);
    this.capture = capture;
    this.context = context;
    this.constants = constants;

    capture.addListener(this);
    context.addListener(this);
  }

  @Override
  public void onCaptureLoadingStart(boolean maintainState) {
    // Drop the selection unless the caller asked to keep UI state across the reload.
    if (!maintainState) {
      selection = null;
    }
    reset();
  }

  @Override
  public void onCaptureLoaded(Loadable.Message error) {
    if (error == null && selection != null) {
      // Re-home the remembered selection onto the newly loaded capture, then
      // re-resolve its tree node if the command tree is already available.
      selection = selection.withCapture(capture.getData());
      if (isLoaded()) {
        resolve(selection.getCommand(), node -> selectAtoms(selection.withNode(node), true));
      }
    }
  }

  @Override
  public void onContextsLoaded() {
    onContextSelected(context.getSelectedContext());
  }

  @Override
  public void onContextSelected(FilteringContext ctx) {
    if (selection != null && selection.getNode() != null) {
      // Clear the node, so the selection will be re-resolved once the context has updated.
      selection = selection.withNode(null);
    }
    load(commandTree(capture.getData(), ctx), false);
  }

  @Override
  protected ListenableFuture<Node> doLoad(Path.Any path) {
    // Two-step load: fetch the command tree, then fetch its root node's data.
    return Futures.transformAsync(client.get(path),
        tree -> Futures.transform(client.get(Paths.toAny(tree.getCommandTree().getRoot())),
            val -> new RootNode(
                tree.getCommandTree().getRoot().getTree(), val.getCommandTreeNode())));
  }

  // Loads a tree node's data (and, for leaf nodes, the command it represents).
  // Returns null if the node is already loaded; see Node.load.
  public ListenableFuture<Node> load(Node node) {
    return node.load(shell, () -> Futures.transformAsync(
        client.get(Paths.toAny(node.getPath(Path.CommandTreeNode.newBuilder()))), v1 -> {
          Service.CommandTreeNode data = v1.getCommandTreeNode();
          if (data.getGroup().isEmpty() && data.hasCommands()) {
            // Leaf node: also fetch the last command in its range for display.
            return Futures.transform(
                loadCommand(lastCommand(data.getCommands())), cmd -> new NodeData(data, cmd));
          }
          return Futures.immediateFuture(new NodeData(data, null));
        }));
  }

  // Fetches a command and ensures its constant sets are loaded before returning it.
  public ListenableFuture<API.Command> loadCommand(Path.Command path) {
    return Futures.transformAsync(client.get(Paths.toAny(path)),
        value -> Futures.transform(constants.loadConstants(value.getCommand()),
            ignore -> value.getCommand()));
  }

  // Loads the node and invokes callback on the UI thread when done.
  // The callback is not invoked if the node was already loaded (future == null).
  public void load(Node node, Runnable callback) {
    ListenableFuture<Node> future = load(node);
    if (future != null) {
      Rpc.listen(future, new UiCallback<Node, Node>(shell, LOG) {
        @Override
        protected Node onRpcThread(Rpc.Result<Node> result)
            throws RpcException, ExecutionException {
          return result.get();
        }

        @Override
        protected void onUiThread(Node result) {
          callback.run();
        }
      });
    }
  }

  @Override
  protected void fireLoadStartEvent() {
    listeners.fire().onAtomsLoadingStart();
  }

  @Override
  protected void fireLoadedEvent() {
    listeners.fire().onAtomsLoaded();
    if (selection != null) {
      // Force a re-selection so the pending selection resolves against the new tree.
      selectAtoms(selection, true);
    }
  }

  // Returns the selection only once it has been resolved to a tree node.
  public AtomIndex getSelectedAtoms() {
    return (selection != null && selection.getNode() != null) ? selection : null;
  }

  public void selectAtoms(AtomIndex index, boolean force) {
    if (!force && Objects.equal(selection, index)) {
      return;
    } else if (!isLoaded()) {
      // Not loaded yet: remember the selection and apply it in fireLoadedEvent().
      this.selection = index;
      return;
    }

    RootNode root = (RootNode)getData();
    if (index.getNode() == null) {
      // No tree node known yet: resolve asynchronously, then re-enter with the node set.
      resolve(index.getCommand(), node -> selectAtoms(index.withNode(node), force));
    } else if (!index.getNode().getTree().equals(root.tree)) {
      // TODO
      throw new UnsupportedOperationException("This is not yet supported, needs API clarification");
    } else {
      selection = index;
      listeners.fire().onAtomsSelected(selection);
    }
  }

  // Resolves a command path to its node in the current command tree and hands the
  // result to cb on the UI thread.
  private void resolve(Path.Command command, Consumer<Path.CommandTreeNode> cb) {
    Rpc.listen(client.get(commandTree(((RootNode)getData()).tree, command)),
        new UiCallback<Service.Value, Path.CommandTreeNode>(shell, LOG) {
      @Override
      protected Path.CommandTreeNode onRpcThread(Rpc.Result<Service.Value> result)
          throws RpcException, ExecutionException {
        Service.Value value = result.get();
        LOG.log(FINE, "Resolved selection to {0}", value);
        return value.getPath().getCommandTreeNode();
      }

      @Override
      protected void onUiThread(Path.CommandTreeNode result) {
        cb.accept(result);
      }
    });
  }

  // Fetches the application-pool memory observations (reads first, then writes)
  // made by the command at the given index.
  public ListenableFuture<Observation[]> getObservations(AtomIndex index) {
    return Futures.transform(client.get(observationsAfter(index, Application_VALUE)), v -> {
      Service.Memory mem = v.getMemory();
      Observation[] obs = new Observation[mem.getReadsCount() + mem.getWritesCount()];
      int idx = 0;
      for (Service.MemoryRange read : mem.getReadsList()) {
        obs[idx++] = new Observation(index, true, read);
      }
      for (Service.MemoryRange write : mem.getWritesList()) {
        obs[idx++] = new Observation(index, false, write);
      }
      return obs;
    });
  }

  /**
   * Read or write memory observation at a specific command.
   */
  public static class Observation {
    // Sentinel used where "no observation" needs to be representable; its overrides
    // avoid touching the null index/range. NOTE(review): getPath() on this sentinel
    // would NPE — callers are expected never to request its path.
    public static final Observation NULL_OBSERVATION = new Observation(null, false, null) {
      @Override
      public String toString() {
        return Messages.SELECT_OBSERVATION;
      }

      @Override
      public boolean contains(long address) {
        return false;
      }
    };

    private final AtomIndex index;
    private final boolean read;
    private final Service.MemoryRange range;

    public Observation(AtomIndex index, boolean read, Service.MemoryRange range) {
      this.index = index;
      this.read = read;
      this.range = range;
    }

    public Path.Memory getPath() {
      return Paths.memoryAfter(index, Application_VALUE, range).getMemory();
    }

    public boolean contains(long address) {
      return Ranges.contains(range, address);
    }

    @Override
    public String toString() {
      long base = range.getBase(), count = range.getSize();
      return (read ? "Read " : "Write ") + count + " byte" + (count == 1 ? "" : "s") +
          String.format(" at 0x%016x", base);
    }
  }

  /**
   * An index into the atom stream, representing a specific "point in time" in the trace.
   */
  public static class AtomIndex implements Comparable<AtomIndex> {
    private final Path.Command command;
    private final Path.CommandTreeNode node;
    // Whether group selection is preferred when resolving to a tree node.
    // Note: deliberately excluded from equals/hashCode/compareTo below.
    private final boolean group;

    private AtomIndex(Path.Command command, Path.CommandTreeNode node, boolean group) {
      this.command = command;
      this.node = node;
      this.group = group;
    }

    /**
     * Create an index pointing to the given command and node.
     */
    public static AtomIndex forNode(Path.Command command, Path.CommandTreeNode node) {
      return new AtomIndex(command, node, false);
    }

    /**
     * Create an index pointing to the given command, without knowing the tree node.
     * The tree node is then resolved when it is needed.
     */
    public static AtomIndex forCommand(Path.Command command) {
      return new AtomIndex(command, null, false);
    }

    /**
     * Same as {@link #forCommand}, except that group selection is to be preferred when
     * resolving to a tree node.
     */
    public static AtomIndex forGroup(Path.Command command) {
      return new AtomIndex(command, null, true);
    }

    public AtomIndex withNode(Path.CommandTreeNode newNode) {
      return new AtomIndex(command, newNode, group);
    }

    // Re-homes the index onto another capture; the node is cleared because tree
    // nodes are only valid within the capture they were resolved against.
    public AtomIndex withCapture(Path.Capture capture) {
      return new AtomIndex(command.toBuilder().setCapture(capture).build(), null, group);
    }

    public Path.Command getCommand() {
      return command;
    }

    public Path.CommandTreeNode getNode() {
      return node;
    }

    public boolean isGroup() {
      return group;
    }

    @Override
    public String toString() {
      return command.getIndicesList().toString();
    }

    @Override
    public int hashCode() {
      return command.getIndicesList().hashCode();
    }

    @Override
    public boolean equals(Object obj) {
      // Identity is determined by the command indices only (node and group are
      // resolution details, not part of the logical position).
      if (obj == this) {
        return true;
      } else if (!(obj instanceof AtomIndex)) {
        return false;
      }
      return command.getIndicesList().equals(((AtomIndex)obj).command.getIndicesList());
    }

    @Override
    public int compareTo(AtomIndex o) {
      return Paths.compare(command, o.command);
    }
  }

  // Lazily-loaded node of the command tree. Children are created on demand;
  // data/command are filled in by load() on the UI thread.
  public static class Node {
    private final Node parent;
    private final int index;
    private Node[] children;
    private Service.CommandTreeNode data;
    private API.Command command;
    // In-flight load, cached so concurrent callers share one request.
    private ListenableFuture<Node> loadFuture;

    public Node(Service.CommandTreeNode data) {
      this(null, 0);
      this.data = data;
    }

    public Node(Node parent, int index) {
      this.parent = parent;
      this.index = index;
    }

    public Node getParent() {
      return parent;
    }

    public int getChildCount() {
      return (data == null) ? 0 : (int)data.getNumChildren();
    }

    public Node getChild(int child) {
      return getOrCreateChildren()[child];
    }

    public Node[] getChildren() {
      // Defensive copy so callers cannot mutate the internal array.
      return getOrCreateChildren().clone();
    }

    private Node[] getOrCreateChildren() {
      if (children == null) {
        Preconditions.checkState(data != null, "Querying children before loaded");
        children = new Node[(int)data.getNumChildren()];
        for (int i = 0; i < children.length; i++) {
          children[i] = new Node(this, i);
        }
      }
      return children;
    }

    public boolean isLastChild() {
      return parent == null || (parent.getChildCount() - 1 == index);
    }

    public Service.CommandTreeNode getData() {
      return data;
    }

    public API.Command getCommand() {
      return command;
    }

    // Builds this node's path by appending its index to the parent's path.
    // Recursion terminates at RootNode, which overrides this to set the tree.
    public Path.CommandTreeNode.Builder getPath(Path.CommandTreeNode.Builder path) {
      return parent.getPath(path).addIndices(index);
    }

    public AtomIndex getIndex() {
      return (data == null) ? null : AtomIndex.forNode(data.getRepresentation(),
          getPath(Path.CommandTreeNode.newBuilder()).build());
    }

    // Starts (or joins) loading this node's data. Returns null if already loaded,
    // otherwise a future that completes on the UI thread once data/command are set.
    public ListenableFuture<Node> load(Shell shell, Supplier<ListenableFuture<NodeData>> loader) {
      if (data != null) {
        // Already loaded.
        return null;
      } else if (loadFuture != null && !loadFuture.isCancelled()) {
        return loadFuture;
      }
      return loadFuture = Futures.transformAsync(loader.get(),
          newData -> submitIfNotDisposed(shell, () -> {
            data = newData.data;
            command = newData.command;
            loadFuture = null; // Don't hang on to listeners.
            return Node.this;
          }));
    }

    @Override
    public boolean equals(Object obj) {
      if (obj == this) {
        return true;
      } else if (!(obj instanceof Node)) {
        return false;
      }
      Node n = (Node)obj;
      return index == n.index && parent.equals(n.parent);
    }

    @Override
    public int hashCode() {
      return parent.hashCode() * 31 + index;
    }

    @Override
    public String toString() {
      return parent + "/" + index +
          (data == null ? "" : " " + data.getGroup() + data.getCommands().getToList());
    }
  }

  // Root of the command tree; anchors getPath() recursion with the tree id.
  private static class RootNode extends Node {
    public final Path.ID tree;

    public RootNode(Path.ID tree, Service.CommandTreeNode data) {
      super(data);
      this.tree = tree;
    }

    @Override
    public Path.CommandTreeNode.Builder getPath(Path.CommandTreeNode.Builder path) {
      return path.setTree(tree);
    }

    @Override
    public String toString() {
      return "Root";
    }

    @Override
    public boolean equals(Object obj) {
      if (obj == this) {
        return true;
      } else if (!(obj instanceof RootNode)) {
        return false;
      }
      return tree.equals(((RootNode)obj).tree);
    }

    @Override
    public int hashCode() {
      return tree.hashCode();
    }
  }

  // Immutable pair of a tree node's proto data and (for leaves) its command.
  private static class NodeData {
    public final Service.CommandTreeNode data;
    public final API.Command command;

    public NodeData(Service.CommandTreeNode data, API.Command command) {
      this.data = data;
      this.command = command;
    }
  }

  public interface Listener extends Events.Listener {
    /**
     * Event indicating that the tree root has changed and is being loaded.
     */
    public default void onAtomsLoadingStart() { /* empty */ }

    /**
     * Event indicating that the tree root has finished loading.
     */
    public default void onAtomsLoaded() { /* empty */ }

    /**
     * Event indicating that the currently selected command range has changed.
     */
    @SuppressWarnings("unused")
    public default void onAtomsSelected(AtomIndex selection) { /* empty */ }
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version * 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package org.apache.storm.scheduler.resource; import java.util.concurrent.ConcurrentHashMap; import org.apache.storm.networktopography.DNSToSwitchMapping; import org.apache.storm.scheduler.resource.normalization.NormalizedResources; import org.apache.storm.Config; import org.apache.storm.DaemonConfig; import org.apache.storm.generated.Bolt; import org.apache.storm.generated.SpoutSpec; import org.apache.storm.generated.StormTopology; import org.apache.storm.scheduler.Cluster; import org.apache.storm.scheduler.ExecutorDetails; import org.apache.storm.scheduler.INimbus; import org.apache.storm.scheduler.IScheduler; import org.apache.storm.scheduler.ISchedulingState; import org.apache.storm.scheduler.SchedulerAssignment; import org.apache.storm.scheduler.SupervisorDetails; import org.apache.storm.scheduler.Topologies; import org.apache.storm.scheduler.TopologyDetails; import org.apache.storm.scheduler.WorkerSlot; import org.apache.storm.scheduler.resource.strategies.priority.DefaultSchedulingPriorityStrategy; import org.apache.storm.scheduler.resource.strategies.scheduling.ConstraintSolverStrategy; import org.apache.storm.scheduler.resource.strategies.scheduling.DefaultResourceAwareStrategy; import 
org.apache.storm.scheduler.resource.strategies.scheduling.GenericResourceAwareStrategy;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.BoltDeclarer;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.SpoutDeclarer;
import org.apache.storm.topology.TopologyBuilder;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;
import org.apache.storm.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Shared helpers for building clusters, supervisors, and topologies in
// resource-aware-scheduler tests.
public class TestUtilsForResourceAwareScheduler {
    private static final Logger LOG = LoggerFactory.getLogger(TestUtilsForResourceAwareScheduler.class);

    // Per-user cpu/memory guarantees for a scheduler user pool.
    public static class TestUserResources {
        private final String name;
        private final Map<String, Number> resources = new HashMap<>();

        public TestUserResources(String name, double cpu, double mem) {
            this.name = name;
            resources.put("cpu", cpu);
            resources.put("memory", mem);
        }

        // Registers this user's resources in the pool; a user may appear only once.
        public void addSelfTo(Map<String, Map<String, Number>> fullPool) {
            if (fullPool.put(name, resources) != null) {
                throw new IllegalStateException("Cannot have 2 copies of " + name + " in a pool");
            }
        }
    }

    public static TestUserResources userRes(String name, double cpu, double mem) {
        return new TestUserResources(name, cpu, mem);
    }

    // Widens Number values to Double, preserving keys.
    public static Map<String, Double> toDouble(Map<String, Number> resources) {
        Map<String, Double> ret = new HashMap<>();
        for (Entry<String, Number> entry : resources.entrySet()) {
            ret.put(entry.getKey(), entry.getValue().doubleValue());
        }
        return ret;
    }

    // Builds a user pool (user name -> resource guarantees) from the given entries.
    public static Map<String, Map<String, Number>> userResourcePool(TestUserResources... resources) {
        Map<String, Map<String, Number>> ret = new HashMap<>();
        for (TestUserResources res : resources) {
            res.addSelfTo(ret);
        }
        return ret;
    }

    // Cluster config preset using the ConstraintSolverStrategy scheduler.
    public static Config createCSSClusterConfig(double compPcore, double compOnHeap, double compOffHeap,
                                                Map<String, Map<String, Number>> pools) {
        Config config = createClusterConfig(compPcore, compOnHeap, compOffHeap, pools);
        config.put(Config.TOPOLOGY_SCHEDULER_STRATEGY, ConstraintSolverStrategy.class.getName());
        return config;
    }

    // Cluster config preset using the GenericResourceAwareStrategy scheduler plus
    // a per-component generic resource map.
    public static Config createGrasClusterConfig(double compPcore, double compOnHeap, double compOffHeap,
                                                 Map<String, Map<String, Number>> pools, Map<String, Double> genericResourceMap) {
        Config config = createClusterConfig(compPcore, compOnHeap, compOffHeap, pools);
        config.put(Config.TOPOLOGY_COMPONENT_RESOURCES_MAP, genericResourceMap);
        config.put(Config.TOPOLOGY_SCHEDULER_STRATEGY, GenericResourceAwareStrategy.class.getName());
        return config;
    }

    // Baseline cluster config: default resource-aware strategy, rack-aware DNS
    // mapping, per-component cpu/heap defaults, and an optional user pool.
    public static Config createClusterConfig(double compPcore, double compOnHeap, double compOffHeap,
                                             Map<String, Map<String, Number>> pools) {
        Config config = new Config();
        config.putAll(Utils.readDefaultConfig());
        config.put(Config.STORM_NETWORK_TOPOGRAPHY_PLUGIN, GenSupervisorsDnsToSwitchMapping.class.getName());
        config.put(DaemonConfig.RESOURCE_AWARE_SCHEDULER_PRIORITY_STRATEGY, DefaultSchedulingPriorityStrategy.class.getName());
        config.put(Config.TOPOLOGY_SCHEDULER_STRATEGY, DefaultResourceAwareStrategy.class.getName());
        config.put(Config.TOPOLOGY_COMPONENT_CPU_PCORE_PERCENT, compPcore);
        config.put(Config.TOPOLOGY_COMPONENT_RESOURCES_OFFHEAP_MEMORY_MB, compOffHeap);
        config.put(Config.TOPOLOGY_COMPONENT_RESOURCES_ONHEAP_MEMORY_MB, compOnHeap);
        if (pools != null) {
            config.put(DaemonConfig.RESOURCE_AWARE_SCHEDULER_USER_POOLS, pools);
        }
        return config;
    }

    // genSupervisors overloads all funnel into genSupervisorsWithRacks with a single rack.
    public static Map<String, SupervisorDetails> genSupervisors(int numSup, int numPorts, double cpu, double mem) {
        return genSupervisors(numSup, numPorts, 0, cpu, mem);
    }

    public static Map<String, SupervisorDetails> genSupervisors(int numSup, int numPorts, double cpu, double mem,
                                                                Map<String, Double> miscResources) {
        return genSupervisors(numSup, numPorts, 0, cpu, mem, miscResources);
    }

    public static Map<String, SupervisorDetails> genSupervisors(int numSup, int numPorts, int start, double cpu, double mem) {
        return genSupervisors(numSup, numPorts, start, cpu, mem, Collections.emptyMap());
    }

    public static Map<String, SupervisorDetails> genSupervisors(int numSup, int numPorts, int start, double cpu, double mem,
                                                                Map<String, Double> miscResources) {
        return genSupervisorsWithRacks(1, numSup, numPorts, 0, start, cpu, mem, miscResources);
    }

    // Matches host names of the form "host-<n>-<rack>" produced by genSupervisorsWithRacks.
    private static final Pattern HOST_NAME_PATTERN = Pattern.compile("^(host-\\d+)-(.+)$");

    // Maps a generated host name to its rack name; unknown names fall back to the default rack.
    public static String hostNameToRackName(String hostName) {
        Matcher m = HOST_NAME_PATTERN.matcher(hostName);
        if (m.matches()) {
            return m.group(2);
        }
        return DNSToSwitchMapping.DEFAULT_RACK;
    }

    // Matches supervisor ids of the form "r<rack>s<super>" produced by genSupervisorsWithRacks.
    private static final Pattern SUPERVISOR_ID_PATTERN = Pattern.compile("^(r\\d+)s(\\d+)$");

    // Maps a generated supervisor id to its rack name; unknown ids fall back to the default rack.
    public static String supervisorIdToRackName(String hostName) {
        Matcher m = SUPERVISOR_ID_PATTERN.matcher(hostName);
        if (m.matches()) {
            return m.group(1);
        }
        return DNSToSwitchMapping.DEFAULT_RACK;
    }

    // DNSToSwitchMapping that derives rack names from the generated host-name scheme,
    // caching resolutions across calls.
    public static class GenSupervisorsDnsToSwitchMapping implements DNSToSwitchMapping {
        private Map<String, String> mappingCache = new ConcurrentHashMap<>();

        @Override
        public Map<String,String> resolve(List<String> names) {
            Map<String, String> m = new HashMap<>();
            for (String name : names) {
                m.put(name, mappingCache.computeIfAbsent(name, TestUtilsForResourceAwareScheduler::hostNameToRackName));
            }
            return m;
        }
    }

    // Generates numRacks x numSupersPerRack supervisors (ids "rRRRsSSS", hosts
    // "host-SSS-rack-RRR") each with numPorts ports and the given normalized resources.
    public static Map<String, SupervisorDetails> genSupervisorsWithRacks(int numRacks, int numSupersPerRack, int numPorts,
                                                                         int rackStart, int superInRackStart, double cpu, double mem,
                                                                         Map<String, Double> miscResources) {
        Map<String, Double> resourceMap = new HashMap<>();
        resourceMap.put(Config.SUPERVISOR_CPU_CAPACITY, cpu);
        resourceMap.put(Config.SUPERVISOR_MEMORY_CAPACITY_MB, mem);
        resourceMap.putAll(miscResources);
        Map<String, SupervisorDetails> retList = new HashMap<>();
        for (int rack = rackStart; rack < numRacks + rackStart; rack++) {
            for (int superInRack = superInRackStart; superInRack < (numSupersPerRack + superInRackStart); superInRack++) {
                List<Number> ports = new LinkedList<>();
                for (int p = 0; p < numPorts; p++) {
                    ports.add(p);
                }
                SupervisorDetails sup = new SupervisorDetails(String.format("r%03ds%03d", rack, superInRack),
                                                              String.format("host-%03d-rack-%03d", superInRack, rack), null, ports,
                                                              NormalizedResources.RESOURCE_NAME_NORMALIZER.normalizedResourceMap(resourceMap));
                retList.put(sup.getId(), sup);
            }
        }
        return retList;
    }

    // Derives the executor -> component mapping from a topology's declared
    // parallelism hints, assigning one task per executor in declaration order.
    public static Map<ExecutorDetails, String> genExecsAndComps(StormTopology topology) {
        Map<ExecutorDetails, String> retMap = new HashMap<>();
        int startTask = 0;
        int endTask = 0;
        for (Map.Entry<String, SpoutSpec> entry : topology.get_spouts().entrySet()) {
            SpoutSpec spout = entry.getValue();
            String spoutId = entry.getKey();
            int spoutParallelism = spout.get_common().get_parallelism_hint();
            for (int i = 0; i < spoutParallelism; i++) {
                retMap.put(new ExecutorDetails(startTask, endTask), spoutId);
                startTask++;
                endTask++;
            }
        }
        for (Map.Entry<String, Bolt> entry : topology.get_bolts().entrySet()) {
            String boltId = entry.getKey();
            Bolt bolt = entry.getValue();
            int boltParallelism = bolt.get_common().get_parallelism_hint();
            for (int i = 0; i < boltParallelism; i++) {
                retMap.put(new ExecutorDetails(startTask, endTask), boltId);
                startTask++;
                endTask++;
            }
        }
        return retMap;
    }

    // Returns a new Topologies combining the existing set with the given additions;
    // duplicate topology ids are rejected.
    public static Topologies addTopologies(Topologies topos, TopologyDetails... details) {
        Map<String, TopologyDetails> topoMap = new HashMap<>();
        for (TopologyDetails td : topos.getTopologies()) {
            topoMap.put(td.getId(), td);
        }
        for (TopologyDetails td : details) {
            if (topoMap.put(td.getId(), td) != null) {
                throw new IllegalArgumentException("Cannot have multiple topologies with id " + td.getId());
            }
        }
        return new Topologies(topoMap);
    }

    public static TopologyDetails genTopology(String name, Map<String, Object> config, int numSpout, int numBolt,
                                              int spoutParallelism, int boltParallelism, int launchTime, int priority, String user) {
        // Default: effectively unbounded worker max heap.
        return genTopology(name, config, numSpout, numBolt, spoutParallelism, boltParallelism, launchTime, priority, user,
                           Double.MAX_VALUE);
    }

    public static TopologyDetails genTopology(String name, Map<String, Object> config, int numSpout, int numBolt,
                                              int spoutParallelism, int boltParallelism, int launchTime, int priority, String user,
                                              double maxHeapSize) {
        StormTopology topology = buildTopology(numSpout, numBolt, spoutParallelism, boltParallelism);
        return topoToTopologyDetails(name, config, topology, launchTime, priority, user, maxHeapSize);
    }

    // Wraps a StormTopology in a TopologyDetails with the standard test config keys;
    // the topology id is "<name>-<launchTime>".
    public static TopologyDetails topoToTopologyDetails(String name, Map<String, Object> config, StormTopology topology,
                                                        int launchTime, int priority, String user, double maxHeapSize) {
        Config conf = new Config();
        conf.putAll(config);
        conf.put(Config.TOPOLOGY_PRIORITY, priority);
        conf.put(Config.TOPOLOGY_NAME, name);
        conf.put(Config.TOPOLOGY_SUBMITTER_USER, user);
        conf.put(Config.TOPOLOGY_WORKER_MAX_HEAP_SIZE_MB, maxHeapSize);
        TopologyDetails topo = new TopologyDetails(name + "-" + launchTime, conf, topology, 0,
                                                   genExecsAndComps(topology), launchTime, user);
        return topo;
    }

    public static StormTopology buildTopology(int numSpout, int numBolt, int spoutParallelism, int boltParallelism) {
        return topologyBuilder(numSpout, numBolt, spoutParallelism, boltParallelism).createTopology();
    }

    public static TopologyBuilder topologyBuilder(int numSpout, int numBolt, int spoutParallelism, int
boltParallelism) { LOG.debug("buildTopology with -> numSpout: " + numSpout + " spoutParallelism: " + spoutParallelism + " numBolt: " + numBolt + " boltParallelism: " + boltParallelism); TopologyBuilder builder = new TopologyBuilder(); for (int i = 0; i < numSpout; i++) { SpoutDeclarer s1 = builder.setSpout("spout-" + i, new TestSpout(), spoutParallelism); } int j = 0; for (int i = 0; i < numBolt; i++) { if (j >= numSpout) { j = 0; } BoltDeclarer b1 = builder.setBolt("bolt-" + i, new TestBolt(), boltParallelism).shuffleGrouping("spout-" + j); j++; } return builder; } public static class TestSpout extends BaseRichSpout { boolean _isDistributed; SpoutOutputCollector _collector; public TestSpout() { this(true); } public TestSpout(boolean isDistributed) { _isDistributed = isDistributed; } @Override public void open(Map<String, Object> conf, TopologyContext context, SpoutOutputCollector collector) { _collector = collector; } @Override public void close() { } @Override public void nextTuple() { Utils.sleep(100); final String[] words = new String[]{ "nathan", "mike", "jackson", "golda", "bertels" }; final Random rand = new Random(); final String word = words[rand.nextInt(words.length)]; _collector.emit(new Values(word)); } @Override public void ack(Object msgId) { } @Override public void fail(Object msgId) { } @Override public void declareOutputFields(OutputFieldsDeclarer declarer) { declarer.declare(new Fields("word")); } @Override public Map<String, Object> getComponentConfiguration() { if (!_isDistributed) { Map<String, Object> ret = new HashMap<>(); ret.put(Config.TOPOLOGY_MAX_TASK_PARALLELISM, 1); return ret; } else { return null; } } } public static class TestBolt extends BaseRichBolt { OutputCollector _collector; @Override public void prepare(Map<String, Object> conf, TopologyContext context, OutputCollector collector) { _collector = collector; } @Override public void execute(Tuple tuple) { _collector.emit(tuple, new Values(tuple.getString(0) + "!!!")); } @Override 
public void declareOutputFields(OutputFieldsDeclarer declarer) { declarer.declare(new Fields("word")); } } public static class INimbusTest implements INimbus { @Override public void prepare(Map<String, Object> topoConf, String schedulerLocalDir) { } @Override public Collection<WorkerSlot> allSlotsAvailableForScheduling(Collection<SupervisorDetails> existingSupervisors, Topologies topologies, Set<String> topologiesMissingAssignments) { return null; } @Override public void assignSlots(Topologies topologies, Map<String, Collection<WorkerSlot>> newSlotsByTopologyId) { } @Override public String getHostName(Map<String, SupervisorDetails> existingSupervisors, String nodeId) { if (existingSupervisors.containsKey(nodeId)) { return existingSupervisors.get(nodeId).getHost(); } return null; } @Override public IScheduler getForcedScheduler() { return null; } } private static boolean isContain(String source, String subItem) { String pattern = "\\b" + subItem + "\\b"; Pattern p = Pattern.compile(pattern, Pattern.CASE_INSENSITIVE); Matcher m = p.matcher(source); return m.find(); } public static void assertTopologiesNotScheduled(Cluster cluster, String... topoNames) { Topologies topologies = cluster.getTopologies(); for (String topoName : topoNames) { TopologyDetails td = topologies.getByName(topoName); assert (td != null) : topoName; String topoId = td.getId(); String status = cluster.getStatus(topoId); assert (status != null) : topoName; assert (!isStatusSuccess(status)) : topoName; assert (cluster.getAssignmentById(topoId) == null) : topoName; assert (cluster.needsSchedulingRas(td)) : topoName; } } public static void assertTopologiesFullyScheduled(Cluster cluster, String... 
topoNames) { Topologies topologies = cluster.getTopologies(); for (String topoName : topoNames) { TopologyDetails td = topologies.getByName(topoName); assert (td != null) : topoName; String topoId = td.getId(); assertStatusSuccess(cluster, topoId); assert (cluster.getAssignmentById(topoId) != null) : topoName; assert (cluster.needsSchedulingRas(td) == false) : topoName; } } public static void assertTopologiesBeenEvicted(Cluster cluster, Set<String> evictedTopologies, String... topoNames) { Topologies topologies = cluster.getTopologies(); LOG.info("Evicted topos: {}", evictedTopologies); assert (evictedTopologies != null); for (String topoName : topoNames) { TopologyDetails td = topologies.getByName(topoName); assert (td != null) : topoName; String topoId = td.getId(); assert (evictedTopologies.contains(topoId)) : topoName; } } public static void assertTopologiesNotBeenEvicted(Cluster cluster, Set<String> evictedTopologies, String... topoNames) { Topologies topologies = cluster.getTopologies(); LOG.info("Evicted topos: {}", evictedTopologies); assert (evictedTopologies != null); for (String topoName : topoNames) { TopologyDetails td = topologies.getByName(topoName); assert (td != null) : topoName; String topoId = td.getId(); assert (!evictedTopologies.contains(topoId)) : topoName; } } public static void assertStatusSuccess(Cluster cluster, String topoId) { assert (isStatusSuccess(cluster.getStatus(topoId))) : "topology status " + topoId + " is not successful " + cluster.getStatus(topoId); } public static boolean isStatusSuccess(String status) { return isContain(status, "fully") && isContain(status, "scheduled") && !isContain(status, "unsuccessful"); } public static Map<SupervisorDetails, Double> getSupervisorToMemoryUsage(ISchedulingState cluster, Topologies topologies) { Map<SupervisorDetails, Double> superToMem = new HashMap<>(); Collection<SchedulerAssignment> assignments = cluster.getAssignments().values(); Collection<SupervisorDetails> supervisors = 
cluster.getSupervisors().values(); for (SupervisorDetails supervisor : supervisors) { superToMem.put(supervisor, 0.0); } for (SchedulerAssignment assignment : assignments) { Map<ExecutorDetails, SupervisorDetails> executorToSupervisor = new HashMap<>(); Map<SupervisorDetails, List<ExecutorDetails>> supervisorToExecutors = new HashMap<>(); TopologyDetails topology = topologies.getById(assignment.getTopologyId()); for (Map.Entry<ExecutorDetails, WorkerSlot> entry : assignment.getExecutorToSlot().entrySet()) { executorToSupervisor.put(entry.getKey(), cluster.getSupervisorById(entry.getValue().getNodeId())); } for (Map.Entry<ExecutorDetails, SupervisorDetails> entry : executorToSupervisor.entrySet()) { List<ExecutorDetails> executorsOnSupervisor = supervisorToExecutors.get(entry.getValue()); if (executorsOnSupervisor == null) { executorsOnSupervisor = new ArrayList<>(); supervisorToExecutors.put(entry.getValue(), executorsOnSupervisor); } executorsOnSupervisor.add(entry.getKey()); } for (Map.Entry<SupervisorDetails, List<ExecutorDetails>> entry : supervisorToExecutors.entrySet()) { Double supervisorUsedMemory = 0.0; for (ExecutorDetails executor : entry.getValue()) { supervisorUsedMemory += topology.getTotalMemReqTask(executor); } superToMem.put(entry.getKey(), superToMem.get(entry.getKey()) + supervisorUsedMemory); } } return superToMem; } public static Map<SupervisorDetails, Double> getSupervisorToCpuUsage(ISchedulingState cluster, Topologies topologies) { Map<SupervisorDetails, Double> superToCpu = new HashMap<>(); Collection<SchedulerAssignment> assignments = cluster.getAssignments().values(); Collection<SupervisorDetails> supervisors = cluster.getSupervisors().values(); for (SupervisorDetails supervisor : supervisors) { superToCpu.put(supervisor, 0.0); } for (SchedulerAssignment assignment : assignments) { Map<ExecutorDetails, SupervisorDetails> executorToSupervisor = new HashMap<>(); Map<SupervisorDetails, List<ExecutorDetails>> supervisorToExecutors = new 
HashMap<>(); TopologyDetails topology = topologies.getById(assignment.getTopologyId()); for (Map.Entry<ExecutorDetails, WorkerSlot> entry : assignment.getExecutorToSlot().entrySet()) { executorToSupervisor.put(entry.getKey(), cluster.getSupervisorById(entry.getValue().getNodeId())); } for (Map.Entry<ExecutorDetails, SupervisorDetails> entry : executorToSupervisor.entrySet()) { List<ExecutorDetails> executorsOnSupervisor = supervisorToExecutors.get(entry.getValue()); if (executorsOnSupervisor == null) { executorsOnSupervisor = new ArrayList<>(); supervisorToExecutors.put(entry.getValue(), executorsOnSupervisor); } executorsOnSupervisor.add(entry.getKey()); } for (Map.Entry<SupervisorDetails, List<ExecutorDetails>> entry : supervisorToExecutors.entrySet()) { Double supervisorUsedCpu = 0.0; for (ExecutorDetails executor : entry.getValue()) { supervisorUsedCpu += topology.getTotalCpuReqTask(executor); } superToCpu.put(entry.getKey(), superToCpu.get(entry.getKey()) + supervisorUsedCpu); } } return superToCpu; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.script; import org.apache.nifi.components.AllowableValue; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.components.ValidationContext; import org.apache.nifi.components.ValidationResult; import org.apache.nifi.components.resource.ResourceReferences; import org.apache.nifi.context.PropertyContext; import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processors.script.ScriptRunner; import org.apache.nifi.util.StringUtils; import javax.script.Invocable; import javax.script.ScriptEngineFactory; import javax.script.ScriptEngineManager; import javax.script.ScriptException; import java.net.URL; import java.net.URLClassLoader; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.atomic.AtomicBoolean; 
import static org.apache.commons.lang3.StringUtils.defaultIfBlank; /** * This class contains variables and methods common to scripting processors, reporting tasks, etc. */ public class ScriptingComponentHelper { private static final String UNKNOWN_VERSION = "UNKNOWN"; public PropertyDescriptor SCRIPT_ENGINE; public final AtomicBoolean isInitialized = new AtomicBoolean(false); public Map<String, ScriptEngineFactory> scriptEngineFactoryMap; private String scriptEngineName; private String scriptPath; private String scriptBody; private List<PropertyDescriptor> descriptors; private List<AllowableValue> engineAllowableValues; private ResourceReferences modules; public BlockingQueue<ScriptRunner> scriptRunnerQ = null; public String getScriptEngineName() { return scriptEngineName; } public void setScriptEngineName(String scriptEngineName) { this.scriptEngineName = scriptEngineName; } public String getScriptPath() { return scriptPath; } public void setScriptPath(String scriptPath) { this.scriptPath = scriptPath; } public String getScriptBody() { return scriptBody; } public void setScriptBody(String scriptBody) { this.scriptBody = scriptBody; } public String[] getModules() { return modules.asLocations().toArray(new String[0]); } public void setModules(final ResourceReferences modules) { this.modules = modules; } public List<PropertyDescriptor> getDescriptors() { return descriptors; } public List<AllowableValue> getScriptEngineAllowableValues() { return engineAllowableValues; } public void setDescriptors(List<PropertyDescriptor> descriptors) { this.descriptors = descriptors; } /** * Custom validation for ensuring exactly one of Script File or Script Body is populated * * @param validationContext provides a mechanism for obtaining externally * managed values, such as property values and supplies convenience methods * for operating on those values * @return A collection of validation results */ public Collection<ValidationResult> customValidate(ValidationContext 
validationContext) { Set<ValidationResult> results = new HashSet<>(); // Verify that exactly one of "script file" or "script body" is set Map<PropertyDescriptor, String> propertyMap = validationContext.getProperties(); if (StringUtils.isEmpty(propertyMap.get(ScriptingComponentUtils.SCRIPT_FILE)) == StringUtils.isEmpty(propertyMap.get(ScriptingComponentUtils.SCRIPT_BODY))) { results.add(new ValidationResult.Builder().subject("Script Body or Script File").valid(false).explanation( "exactly one of Script File or Script Body must be set").build()); } return results; } public void createResources() { createResources(true); } /** * This method creates all resources needed for the script processor to function, such as script engines, * script file reloader threads, etc. */ public void createResources(final boolean requireInvocable) { descriptors = new ArrayList<>(); // The following is required for JRuby, should be transparent to everything else. // Note this is not done in a ScriptRunner, as it is too early in the lifecycle. The // setting must be there before the factories/engines are loaded. System.setProperty("org.jruby.embed.localvariable.behavior", "persistent"); // Create list of available engines ScriptEngineManager scriptEngineManager = new ScriptEngineManager(); List<ScriptEngineFactory> scriptEngineFactories = scriptEngineManager.getEngineFactories(); if (scriptEngineFactories != null) { scriptEngineFactoryMap = new HashMap<>(scriptEngineFactories.size()); List<AllowableValue> engineList = new LinkedList<>(); for (ScriptEngineFactory factory : scriptEngineFactories) { if (!requireInvocable || factory.getScriptEngine() instanceof Invocable) { final AllowableValue scriptEngineAllowableValue = getScriptLanguageAllowableValue(factory); engineList.add(scriptEngineAllowableValue); scriptEngineFactoryMap.put(factory.getLanguageName(), factory); } } // Sort the list by name so the list always looks the same. 
engineList.sort((o1, o2) -> { if (o1 == null) { return o2 == null ? 0 : 1; } if (o2 == null) { return -1; } return o1.getValue().compareTo(o2.getValue()); }); engineAllowableValues = engineList; AllowableValue[] engines = engineList.toArray(new AllowableValue[0]); SCRIPT_ENGINE = new PropertyDescriptor.Builder() .name("Script Engine") .required(true) .description("The engine to execute scripts") .allowableValues(engines) .defaultValue(engines[0].getValue()) .required(true) .expressionLanguageSupported(ExpressionLanguageScope.NONE) .build(); descriptors.add(SCRIPT_ENGINE); } descriptors.add(ScriptingComponentUtils.SCRIPT_FILE); descriptors.add(ScriptingComponentUtils.SCRIPT_BODY); descriptors.add(ScriptingComponentUtils.MODULES); isInitialized.set(true); } /** * Determines whether the given path refers to a valid file * * @param path a path to a file * @return true if the path refers to a valid file, false otherwise */ public static boolean isFile(final String path) { return path != null && Files.isRegularFile(Paths.get(path)); } public void setupScriptRunners(final int numberOfScriptEngines, final String scriptToRun, final ComponentLog log) { setupScriptRunners(true, numberOfScriptEngines, scriptToRun, log); } /** * Configures the specified script engine(s) as a queue of ScriptRunners. First, the engine is loaded and instantiated using the JSR-223 * javax.script APIs. Then, if any script configurators have been defined for this engine, their init() method is * called, and the configurator is saved for future calls. 
* * @param numberOfScriptEngines number of engines to setup * @see org.apache.nifi.processors.script.ScriptRunner */ public void setupScriptRunners(final boolean newQ, final int numberOfScriptEngines, final String scriptToRun, final ComponentLog log) { if (newQ) { scriptRunnerQ = new LinkedBlockingQueue<>(numberOfScriptEngines); } ClassLoader originalContextClassLoader = Thread.currentThread().getContextClassLoader(); try { if (StringUtils.isBlank(scriptEngineName)) { throw new IllegalArgumentException("The script engine name cannot be null"); } // Get a list of URLs from the configurator (if present), or just convert modules from Strings to URLs final String[] locations = modules.asLocations().toArray(new String[0]); final URL[] additionalClasspathURLs = ScriptRunnerFactory.getInstance().getModuleURLsForClasspath(scriptEngineName, locations, log); // Need the right classloader when the engine is created. This ensures the NAR's execution class loader // (plus the module path) becomes the parent for the script engine ClassLoader scriptEngineModuleClassLoader = additionalClasspathURLs != null ? 
new URLClassLoader(additionalClasspathURLs, originalContextClassLoader) : originalContextClassLoader; if (scriptEngineModuleClassLoader != null) { Thread.currentThread().setContextClassLoader(scriptEngineModuleClassLoader); } try { for (int i = 0; i < numberOfScriptEngines; i++) { // ScriptEngineFactory factory = scriptEngineFactoryMap.get(scriptEngineName); ScriptRunner scriptRunner = ScriptRunnerFactory.getInstance().createScriptRunner(factory, scriptToRun, locations); if (!scriptRunnerQ.offer(scriptRunner)) { log.error("Error adding script engine {}", scriptRunner.getScriptEngineName()); } } } catch (ScriptException se) { throw new ProcessException("Could not instantiate script engines", se); } } finally { // Restore original context class loader Thread.currentThread().setContextClassLoader(originalContextClassLoader); } } public void setupVariables(final PropertyContext context) { scriptEngineName = context.getProperty(SCRIPT_ENGINE).getValue(); scriptPath = context.getProperty(ScriptingComponentUtils.SCRIPT_FILE).evaluateAttributeExpressions().getValue(); scriptBody = context.getProperty(ScriptingComponentUtils.SCRIPT_BODY).getValue(); modules = context.getProperty(ScriptingComponentUtils.MODULES).evaluateAttributeExpressions().asResources().flattenRecursively(); } public void stop() { if (scriptRunnerQ != null) { scriptRunnerQ.clear(); } } private AllowableValue getScriptLanguageAllowableValue(final ScriptEngineFactory factory) { final String languageName = factory.getLanguageName(); final String languageVersion = defaultIfBlank(factory.getLanguageVersion(), UNKNOWN_VERSION); final String engineVersion = defaultIfBlank(factory.getEngineVersion(), UNKNOWN_VERSION); final String description = String.format("%s %s [%s %s]", languageName, languageVersion, factory.getEngineName(), engineVersion); return new AllowableValue(languageName, languageName, description); } }
// NOTE(review): legacy Lucene indexing factory for openDams records. The original formatting is
// collapsed onto very long lines; code below is byte-identical (whitespace reformatted only, and
// line comments restored to their own lines so they no longer swallow following code).
package com.openDams.index.factory;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.dom4j.DocumentException;
import com.openDams.bean.Archives;
import com.openDams.bean.Records;
import com.openDams.bean.Relations;
import com.openDams.dao.OpenDamsServiceProvider;
import com.openDams.index.configuration.Element;
import com.regesta.framework.xml.XMLReader;
// Builds Lucene Documents for openDams records according to a list of Element index-field
// configurations. NOTE(review): uses deprecated reflection (Class.newInstance) and rethrows
// exceptions with message only, losing stack traces — candidates for a future cleanup.
public class DocumentFactory {
// Field names always stored on every produced document.
public final static String ID_RECORD = "id_record"; public final static String TITLE_RECORD = "title_record"; public final static String ID_ARCHIVE = "id_archive"; public final static String ARCHIVE_LABEL = "archive_label";
public DocumentFactory() { }
// Builds a document from a Records bean: adds the four standard fields, then one Lucene field
// per configured Element, with reflective value-transformer hooks, boost/relation/rebuilder
// element types, and external-content map handling. On any failure it falls back to an
// "error document" carrying only the standard fields (see getErrorDocument, outside this view).
public synchronized static Document buildDocument(Records record,Archives archives,ArrayList<Object> elements) throws DocumentException{ try {
//System.out.println("#######################################################################1 DocumentFactory.buildDocument()");
Document doc = new Document(); doc.add(new Field(ID_RECORD, record.getIdRecord().toString(), LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); doc.add(new Field(TITLE_RECORD, record.getTitle(), LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); doc.add(new Field(ID_ARCHIVE, Integer.toString(archives.getIdArchive()), LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); doc.add(new Field(ARCHIVE_LABEL, archives.getLabel(), LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); XMLReader xmlReader = record.getXMLReader();
// alredyDone (sic) appears to track visited record ids, presumably to stop relation
// recursion in importFromRelatedDocuments — TODO confirm against that method.
HashMap<Integer, Integer> alredyDone= new HashMap<Integer, Integer>(); ArrayList<Element> externalElements = new ArrayList<Element>();
for(int i=0;i<elements.size();i++){ Element element=(Element)elements.get(i);
// "Plain" elements (no type, or a type other than external/boostRecord/relation/rebuilder):
// extract values from the record XML and index them under each comma-separated search alias.
if(element.getType()==null || (!element.getType().equalsIgnoreCase("external") && !element.getType().equalsIgnoreCase("boostRecord") && !element.getType().equalsIgnoreCase("relation") && !element.getType().equalsIgnoreCase("rebuilder"))){ String[] search_alias = element.getSearch_alias().split(","); @SuppressWarnings("unused") String sortValue = ""; ArrayList<String> values = xmlReader.getNodesValues(element.getText()); for (int k = 0; k < values.size(); k++) {
//String value = values.get(k);
String value = StringEscapeUtils.unescapeXml(values.get(k).trim()); sortValue+=" "+value; for (int j = 0; j < search_alias.length; j++) { if(value!=null && !value.equals("")){
// Optional reflective transformer: classUtil.method(Integer idRecord, String value) -> String.
try { if(element.getClassUtil()!=null){ Class<?> c = Class.forName(element.getClassUtil()); Object obj = c.newInstance(); Method method = c.getMethod(element.getMethod(), Integer.class ,String.class ); value = (String) method.invoke(obj,record.getIdRecord(),value); } } catch (ClassNotFoundException e) { throw new DocumentException(e.getMessage()); } catch (SecurityException e) { throw new DocumentException(e.getMessage()); } catch (NoSuchMethodException e) { throw new DocumentException(e.getMessage()); } catch (IllegalArgumentException e) { throw new DocumentException(e.getMessage()); } catch (IllegalAccessException e) { throw new DocumentException(e.getMessage()); } catch (InvocationTargetException e) { throw new DocumentException(e.getMessage()); } catch (InstantiationException e) { throw new DocumentException(e.getMessage()); }
// First alias: "multi" key style also gets an extra "<alias>_one" field; later aliases are
// always indexed with the "double" index type.
if(j==0){ if(element.getKey_style().equalsIgnoreCase("multi")){ doc.add(new Field(search_alias[j].trim()+"_one", value, LuceneFactory.getStore(element.getLucene_store_type()), LuceneFactory.getIndexType("one"))); doc.add(new Field(search_alias[j].trim(), value, LuceneFactory.getStore(element.getLucene_store_type()), LuceneFactory.getIndexType("double"))); }else{ doc.add(new Field(search_alias[j].trim(), value, LuceneFactory.getStore(element.getLucene_store_type()), LuceneFactory.getIndexType(element.getKey_style()))); } }else{ doc.add(new Field(search_alias[j].trim(), value, LuceneFactory.getStore(element.getLucene_store_type()), LuceneFactory.getIndexType("double"))); } } } }
// No values extracted: index the configured fallback default, if any.
if( (values==null || values.size()==0) && element.getIf_empty_default()!=null){ for (int j = 0; j < search_alias.length; j++) { doc.add(new Field(search_alias[j].trim(), element.getIf_empty_default(), LuceneFactory.getStore(element.getLucene_store_type()), LuceneFactory.getIndexType(element.getKey_style()))); } }
}else if(element.getType()!=null && element.getType().equalsIgnoreCase("boostRecord")){
// boostRecord: apply a document boost when the trigger node has a non-empty value.
if(xmlReader.getNodeValue(element.getText())!=null && !xmlReader.getNodeValue(element.getText()).equals("")){ doc.setBoost(Float.parseFloat(element.getBoost())); }
}else if(element.getType()!=null && element.getType().equalsIgnoreCase("relation")){
// relation: pull fields in from related records; mark this record as visited first.
alredyDone.put(record.getIdRecord(), record.getIdRecord()); importFromRelatedDocuments(element, record.getIdRecord(), doc, alredyDone);
}else if(element.getType()!=null && element.getType().equalsIgnoreCase("rebuilder")){
// rebuilder: reflective side-effect hook classUtil.method(idRecord, idArchive, idRelation).
try { if(element.getClassUtil()!=null){ Class<?> c = Class.forName(element.getClassUtil()); Object obj = c.newInstance(); Method method = c.getMethod(element.getMethod(),Integer.class,Integer.class,Integer.class ); method.invoke(obj,record.getIdRecord(),new Integer(element.getId_archive()),new Integer(element.getId_relation())); } } catch (ClassNotFoundException e) { throw new DocumentException(e.getMessage()); } catch (SecurityException e) { throw new DocumentException(e.getMessage()); } catch (NoSuchMethodException e) { throw new DocumentException(e.getMessage()); } catch (IllegalArgumentException e) { throw new DocumentException(e.getMessage()); } catch (IllegalAccessException e) { throw new DocumentException(e.getMessage()); } catch (InvocationTargetException e) { throw new DocumentException(e.getMessage()); } catch (InstantiationException e) { throw new DocumentException(e.getMessage()); }
}else{ externalElements.add(element); } }
// External contents: index each map entry under its matching "external" element's settings.
if(record.getExternalcontentsMap()!=null){ HashMap<String, ArrayList<String>> externalcontentsMap = record.getExternalcontentsMap(); Set<String> keys = externalcontentsMap.keySet(); Iterator<String> iterator = keys.iterator(); while (iterator.hasNext()) { String key = (String) iterator.next(); ArrayList<String> values = externalcontentsMap.get(key); Element element = null; for (int i = 0; i < externalElements.size(); i++) { if(externalElements.get(i).getSearch_alias().equalsIgnoreCase(key)){ element = externalElements.get(i); break; } } if(element!=null) for (int i = 0; i < values.size(); i++) { doc.add(new Field(key, values.get(i), LuceneFactory.getStore(element.getLucene_store_type()), LuceneFactory.getIndexType(element.getKey_style()))); } } }
return doc; } catch (Exception e) { e.printStackTrace();
// Fallback: build a minimal document with only the four standard fields and hand it to the
// error-document decorator (defined outside this view).
Document doc = new Document(); doc.add(new Field(ID_RECORD, record.getIdRecord().toString(), LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); doc.add(new Field(TITLE_RECORD, record.getTitle(), LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); doc.add(new Field(ID_ARCHIVE, Integer.toString(archives.getIdArchive()), LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); doc.add(new Field(ARCHIVE_LABEL, archives.getLabel(), LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); return getErrorDocument(elements,doc);
//throw new DocumentException(e.getMessage());
} }
// String-based overload: same pipeline as the Records overload, but takes the raw field values
// and XML directly (no Records/Archives beans and no external-content handling).
public synchronized static Document buildDocument(String id_record,String id_archive,String archive_label,String title,String xml, ArrayList<Object> elements) throws DocumentException{ try {
//System.out.println("#######################################################################2 DocumentFactory.buildDocument()");
Document doc = new Document(); doc.add(new Field(ID_RECORD, id_record, LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); doc.add(new Field(TITLE_RECORD,title, LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); doc.add(new Field(ID_ARCHIVE, id_archive, LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); doc.add(new Field(ARCHIVE_LABEL, archive_label, LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); XMLReader xmlReader = new XMLReader(xml); HashMap<Integer, Integer> alredyDone= new HashMap<Integer, Integer>();
for(int i=0;i<elements.size();i++){ Element element=(Element)elements.get(i); if(element.getType()==null || (!element.getType().equalsIgnoreCase("external") && !element.getType().equalsIgnoreCase("boostRecord") && !element.getType().equalsIgnoreCase("relation") && !element.getType().equalsIgnoreCase("rebuilder"))){ String[] search_alias = element.getSearch_alias().split(",");
//String value = xmlReader.getNodeValue(element.getText());
@SuppressWarnings("unused") String sortValue = ""; ArrayList<String> values = xmlReader.getNodesValues(element.getText()); for (int k = 0; k < values.size(); k++) {
//String value = values.get(k);
String value=StringEscapeUtils.unescapeXml(values.get(k).trim()); sortValue+=" "+value; for (int j = 0; j < search_alias.length; j++) { if(value!=null && !value.equals("")){ try { if(element.getClassUtil()!=null){ Class<?> c = Class.forName(element.getClassUtil()); Object obj = c.newInstance(); Method method = c.getMethod(element.getMethod(),Integer.class,String.class );
//System.out.println(method.invoke(obj, value));
value = (String) method.invoke(obj,new Integer(id_record),value); } } catch (ClassNotFoundException e) { throw new DocumentException(e.getMessage()); } catch (SecurityException e) { throw new DocumentException(e.getMessage()); } catch (NoSuchMethodException e) { throw new DocumentException(e.getMessage()); } catch (IllegalArgumentException e) { throw new DocumentException(e.getMessage()); } catch (IllegalAccessException e) { throw new DocumentException(e.getMessage()); } catch (InvocationTargetException e) { throw new DocumentException(e.getMessage()); } catch (InstantiationException e) { throw new DocumentException(e.getMessage()); }
if(j==0){ if(element.getKey_style().equalsIgnoreCase("multi")){ doc.add(new Field(search_alias[j].trim()+"_one", value, LuceneFactory.getStore(element.getLucene_store_type()), LuceneFactory.getIndexType("one"))); doc.add(new Field(search_alias[j].trim(), value, LuceneFactory.getStore(element.getLucene_store_type()), LuceneFactory.getIndexType("double"))); }else{ doc.add(new Field(search_alias[j].trim(), value, LuceneFactory.getStore(element.getLucene_store_type()), LuceneFactory.getIndexType(element.getKey_style()))); } }else{ doc.add(new Field(search_alias[j].trim(), value, LuceneFactory.getStore(element.getLucene_store_type()), LuceneFactory.getIndexType("double"))); }
//doc.add(new Field(search_alias[j].trim(), value, LuceneFactory.getStore(element.getLucene_store_type()), LuceneFactory.getIndexType(element.getKey_style())));
} } } if( (values==null || values.size()==0) && element.getIf_empty_default()!=null){ for (int j = 0; j < search_alias.length; j++) { doc.add(new Field(search_alias[j].trim(), element.getIf_empty_default(), LuceneFactory.getStore(element.getLucene_store_type()), LuceneFactory.getIndexType(element.getKey_style()))); } } }else if(element.getType()!=null && element.getType().equalsIgnoreCase("boostRecord")){ if(xmlReader.getNodeValue(element.getText())!=null && !xmlReader.getNodeValue(element.getText()).equals("")){ doc.setBoost(new Float(element.getBoost())); } }else if(element.getType()!=null && element.getType().equalsIgnoreCase("relation")){ alredyDone.put(new Integer(id_record), new Integer(id_record)); importFromRelatedDocuments(element, new Integer(id_record), doc, alredyDone); }else if(element.getType()!=null && element.getType().equalsIgnoreCase("rebuilder")){ try {
if(element.getClassUtil()!=null){ Class<?> c = Class.forName(element.getClassUtil()); Object obj = c.newInstance(); Method method = c.getMethod(element.getMethod(),Integer.class,Integer.class,Integer.class ); method.invoke(obj,new Integer(id_record),new Integer(element.getId_archive()),new Integer(element.getId_relation())); } } catch (ClassNotFoundException e) { throw new DocumentException(e.getMessage()); } catch (SecurityException e) { throw new DocumentException(e.getMessage()); } catch (NoSuchMethodException e) { throw new DocumentException(e.getMessage()); } catch (IllegalArgumentException e) { throw new DocumentException(e.getMessage()); } catch (IllegalAccessException e) { throw new DocumentException(e.getMessage()); } catch (InvocationTargetException e) { throw new DocumentException(e.getMessage()); } catch (InstantiationException e) { throw new DocumentException(e.getMessage()); } } } return doc; } catch (Exception e) { e.printStackTrace(); Document doc = new Document(); doc.add(new Field(ID_RECORD, id_record, LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); doc.add(new Field(TITLE_RECORD,title, LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); doc.add(new Field(ID_ARCHIVE, id_archive, LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); doc.add(new Field(ARCHIVE_LABEL, archive_label, LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); return getErrorDocument(elements,doc); //throw new DocumentException(e.getMessage()); } } private static void importFromRelatedDocuments(Element element,Integer id_record,Document doc,HashMap<Integer, Integer> alredyDone) throws DocumentException{ System.out.println("VADO A PRENDERE I VALORI DAI RECORD COLLEGATI!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"); @SuppressWarnings("unchecked") List<Relations> relationsList = (List<Relations>)OpenDamsServiceProvider.getService().getListFromSQL(Relations.class,"SELECT * FROM relations r 
where (ref_id_record_1="+id_record+" or ref_id_record_2="+id_record+") and ref_id_relation_type="+element.getId_relation()+";"); for (int z = 0; z < relationsList.size(); z++) { Relations relations = relationsList.get(z); Records relatedRecords = null; if(relations.getRecordsByRefIdRecord1().getIdRecord()!= id_record) relatedRecords = relations.getRecordsByRefIdRecord1(); else relatedRecords = relations.getRecordsByRefIdRecord2(); if(relatedRecords.getArchives().getIdArchive().intValue() == new Integer(element.getId_archive()).intValue() && alredyDone.get(relatedRecords.getIdRecord())==null){ String[] toGetValuesXpaths = element.getText().split(","); String[] search_alias = element.getSearch_alias().split(","); for (int y = 0; y < toGetValuesXpaths.length; y++) { String xpath = toGetValuesXpaths[y]; ArrayList<String> values = relatedRecords.getXMLReader().getNodesValues(xpath); for (int k = 0; k < values.size(); k++) { String value=StringEscapeUtils.unescapeXml(values.get(k).trim()); for (int j = 0; j < search_alias.length; j++) { if(value!=null && !value.equals("")){ try { if(element.getClassUtil()!=null){ Class<?> c = Class.forName(element.getClassUtil()); Object obj = c.newInstance(); Method method = c.getMethod(element.getMethod(),Integer.class,String.class ); value = (String) method.invoke(obj,id_record,value); } } catch (ClassNotFoundException e) { throw new DocumentException(e.getMessage()); } catch (SecurityException e) { throw new DocumentException(e.getMessage()); } catch (NoSuchMethodException e) { throw new DocumentException(e.getMessage()); } catch (IllegalArgumentException e) { throw new DocumentException(e.getMessage()); } catch (IllegalAccessException e) { throw new DocumentException(e.getMessage()); } catch (InvocationTargetException e) { throw new DocumentException(e.getMessage()); } catch (InstantiationException e) { throw new DocumentException(e.getMessage()); } if(j==0){ if(element.getKey_style().equalsIgnoreCase("multi")){ doc.add(new 
Field(search_alias[j].trim()+"_one", value, LuceneFactory.getStore(element.getLucene_store_type()), LuceneFactory.getIndexType("one"))); doc.add(new Field(search_alias[j].trim(), value, LuceneFactory.getStore(element.getLucene_store_type()), LuceneFactory.getIndexType("double"))); }else{ doc.add(new Field(search_alias[j].trim(), value, LuceneFactory.getStore(element.getLucene_store_type()), LuceneFactory.getIndexType(element.getKey_style()))); } }else{ doc.add(new Field(search_alias[j].trim(), value, LuceneFactory.getStore(element.getLucene_store_type()), LuceneFactory.getIndexType("double"))); } } } } } alredyDone.put(relatedRecords.getIdRecord(), relatedRecords.getIdRecord()); } } } public synchronized static Document buildGenericdDocument(Records record,Archives archives) throws DocumentException{ Document doc = new Document(); doc.add(new Field(ID_RECORD, record.getIdRecord().toString(), LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); doc.add(new Field(TITLE_RECORD, record.getTitle(), LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); doc.add(new Field(ID_ARCHIVE, Integer.toString(archives.getIdArchive()), LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); doc.add(new Field(ARCHIVE_LABEL, archives.getLabel(), LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); HashMap<String, ArrayList<String>> xpatsMap = record.getXpatsMap(); Set<String> set = xpatsMap.keySet(); for (String string : set) { ArrayList<String> strings = xpatsMap.get(string); for (int i = 0; i < strings.size(); i++) { doc.add(new Field(string, strings.get(i), LuceneFactory.getStore("no"), LuceneFactory.getIndexType("ANALYZED"))); } } return doc; } public synchronized static Document buildGenericdDocument(String id_record,String id_archive,String archive_label,String title,String xml) throws DocumentException{ Document doc = new Document(); doc.add(new Field(ID_RECORD, id_record, 
LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); doc.add(new Field(TITLE_RECORD, title, LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); doc.add(new Field(ID_ARCHIVE, id_archive, LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); doc.add(new Field(ARCHIVE_LABEL, archive_label, LuceneFactory.getStore("yes"), LuceneFactory.getIndexType("NOT_ANALYZED"))); XMLReader xmlReader = new XMLReader(xml); HashMap<String, ArrayList<String>> xpatsMap = new HashMap<String, ArrayList<String>>(); xmlReader.analyzeNodes(xpatsMap); Set<String> set = xpatsMap.keySet(); for (String string : set) { ArrayList<String> strings = xpatsMap.get(string); for (int i = 0; i < strings.size(); i++) { doc.add(new Field(string, strings.get(i), LuceneFactory.getStore("no"), LuceneFactory.getIndexType("ANALYZED"))); } } return doc; } public static boolean checkMultiFieldValue(HashMap<Integer, ArrayList<Object>> elements_map,String fieldName,String[] requestArchives){ boolean result = true; for (int x = 0; x < requestArchives.length; x++) { ArrayList<Object> elements = elements_map.get(new Integer(requestArchives[x])); for(int i=0;i<elements.size();i++){ Element element=(Element)elements.get(i); if(element.getType()==null || !element.getType().equalsIgnoreCase("external")){ String[] searchAliases = element.getSearch_alias().split(","); for (int k = 0; k < searchAliases.length; k++) { if(searchAliases[k].equalsIgnoreCase(fieldName)){ if(element.getKey_style().equalsIgnoreCase("multi")){ result = true; }else{ return false; } } } } } } return result; } private static Document getErrorDocument(ArrayList<Object> elements,Document doc){ for(int i=0;i<elements.size();i++){ Element element=(Element)elements.get(i); if(element.getType()==null || (!element.getType().equalsIgnoreCase("external") && !element.getType().equalsIgnoreCase("boostRecord"))){ String[] search_alias = element.getSearch_alias().split(","); 
@SuppressWarnings("unused") String sortValue = ""; String value= "n/a"; sortValue+=" "+value; for (int j = 0; j < search_alias.length; j++) { doc.add(new Field(search_alias[j].trim(), value, LuceneFactory.getStore(element.getLucene_store_type()), LuceneFactory.getIndexType("one"))); } } } return doc; } }
package lx.af.utils.UIL.displayer;

import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffColorFilter;
import android.os.Build;
import android.renderscript.Allocation;
import android.renderscript.Element;
import android.renderscript.RSRuntimeException;
import android.renderscript.RenderScript;
import android.renderscript.ScriptIntrinsicBlur;
import android.view.View;

/**
 * Copyright (C) 2015 Wasabeef
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Bitmap blurring helper, imported from https://github.com/wasabeef/Blurry.
 *
 * <p>Blurs via RenderScript's {@link ScriptIntrinsicBlur} on API 17+, falling
 * back to a pure-Java "stack blur" ({@link #stack}) when RenderScript is
 * unavailable or fails at runtime.</p>
 */
class Blur {

    /**
     * Blurs a snapshot of {@code view}.
     *
     * <p>Captures the view through the (low-quality) drawing cache, blurs the
     * capture via {@link #of(Context, Bitmap, BlurFactor)}, then recycles the
     * cache bitmap.</p>
     *
     * @param view   the view to snapshot and blur
     * @param factor blur parameters (size, radius, sampling, overlay color)
     * @return the blurred bitmap, or null when factor.width/sampling or
     *         factor.height/sampling rounds to 0
     */
    public static Bitmap of(View view, BlurFactor factor) {
        view.setDrawingCacheEnabled(true);
        // Force a fresh cache so we don't blur a stale capture.
        view.destroyDrawingCache();
        view.setDrawingCacheQuality(View.DRAWING_CACHE_QUALITY_LOW);
        Bitmap cache = view.getDrawingCache();
        Bitmap bitmap = of(view.getContext(), cache, factor);
        cache.recycle();
        return bitmap;
    }

    /**
     * Blurs {@code source} according to {@code factor}.
     *
     * <p>Steps: downscale by {@code factor.sampling} (blurring a smaller bitmap
     * is cheaper and increases the effective radius), tint with
     * {@code factor.color} (SRC_ATOP), blur (RenderScript when available, stack
     * blur otherwise), then scale back up to factor.width x factor.height if a
     * sampling factor was used.</p>
     *
     * @return the blurred bitmap, or null when the downsampled size is 0 in
     *         either dimension
     */
    public static Bitmap of(Context context, Bitmap source, BlurFactor factor) {
        int width = factor.width / factor.sampling;
        int height = factor.height / factor.sampling;

        if (width == 0 || height == 0) {
            return null;
        }

        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        Canvas canvas = new Canvas(bitmap);
        canvas.scale(1 / (float) factor.sampling, 1 / (float) factor.sampling);
        Paint paint = new Paint();
        paint.setFlags(Paint.FILTER_BITMAP_FLAG | Paint.ANTI_ALIAS_FLAG);
        // Tint the capture with the configured overlay color while drawing it
        // into the downsampled bitmap.
        PorterDuffColorFilter filter =
                new PorterDuffColorFilter(factor.color, PorterDuff.Mode.SRC_ATOP);
        paint.setColorFilter(filter);
        canvas.drawBitmap(source, 0, 0, paint);

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
            try {
                bitmap = Blur.rs(context, bitmap, factor.radius);
            } catch (RSRuntimeException e) {
                // RenderScript can fail at runtime on some devices; fall back
                // to the pure-Java implementation.
                bitmap = Blur.stack(bitmap, factor.radius, true);
            }
        } else {
            bitmap = Blur.stack(bitmap, factor.radius, true);
        }

        if (factor.sampling == BlurFactor.DEFAULT_SAMPLING) {
            return bitmap;
        } else {
            // Undo the downsampling so the caller gets the requested size.
            Bitmap scaled = Bitmap.createScaledBitmap(bitmap, factor.width, factor.height, true);
            bitmap.recycle();
            return scaled;
        }
    }

    /**
     * Blurs {@code bitmap} in place using RenderScript's intrinsic Gaussian
     * blur. Always destroys the RenderScript context before returning.
     *
     * @param radius blur radius (ScriptIntrinsicBlur supports 0 &lt; r &lt;= 25)
     * @return the same bitmap instance, blurred
     * @throws RSRuntimeException if RenderScript fails; callers fall back to
     *         {@link #stack}
     */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
    public static Bitmap rs(Context context, Bitmap bitmap, int radius) throws RSRuntimeException {
        RenderScript rs = null;
        try {
            rs = RenderScript.create(context);
            Allocation input = Allocation.createFromBitmap(rs, bitmap,
                    Allocation.MipmapControl.MIPMAP_NONE, Allocation.USAGE_SCRIPT);
            Allocation output = Allocation.createTyped(rs, input.getType());
            ScriptIntrinsicBlur blur = ScriptIntrinsicBlur.create(rs, Element.U8_4(rs));
            blur.setInput(input);
            blur.setRadius(radius);
            blur.forEach(output);
            output.copyTo(bitmap);
        } finally {
            if (rs != null) {
                rs.destroy();
            }
        }
        return bitmap;
    }

    /**
     * Pure-Java "stack blur": two separable passes (horizontal then vertical)
     * over a sliding window of width {@code 2*radius+1}, weighted so the center
     * pixel counts most. The alpha channel is preserved unchanged.
     *
     * @param sentBitmap       the bitmap to blur
     * @param radius           blur radius in pixels; returns null when &lt; 1
     * @param canReuseInBitmap when true, blurs {@code sentBitmap} in place;
     *                         otherwise blurs a copy
     * @return the blurred bitmap (the input or a copy, see above), or null for
     *         radius &lt; 1
     */
    public static Bitmap stack(Bitmap sentBitmap, int radius, boolean canReuseInBitmap) {

        // Stack Blur v1.0 from
        // http://www.quasimondo.com/StackBlurForCanvas/StackBlurDemo.html
        //
        // Java Author: Mario Klingemann <mario at quasimondo.com>
        // http://incubator.quasimondo.com
        // created Feburary 29, 2004
        // Android port : Yahel Bouaziz <yahel at kayenko.com>
        // http://www.kayenko.com
        // ported april 5th, 2012

        // This is a compromise between Gaussian Blur and Box blur
        // It creates much better looking blurs than Box Blur, but is
        // 7x faster than my Gaussian Blur implementation.
        //
        // I called it Stack Blur because this describes best how this
        // filter works internally: it creates a kind of moving stack
        // of colors whilst scanning through the image. Thereby it
        // just has to add one new block of color to the right side
        // of the stack and remove the leftmost color. The remaining
        // colors on the topmost layer of the stack are either added on
        // or reduced by one, depending on if they are on the right or
        // on the left side of the stack.
        //
        // If you are using this algorithm in your code please add
        // the following line:
        //
        // Stack Blur Algorithm by Mario Klingemann <mario@quasimondo.com>

        Bitmap bitmap;
        if (canReuseInBitmap) {
            bitmap = sentBitmap;
        } else {
            bitmap = sentBitmap.copy(sentBitmap.getConfig(), true);
        }

        if (radius < 1) {
            return (null);
        }

        int w = bitmap.getWidth();
        int h = bitmap.getHeight();

        // Whole image as one ARGB int array.
        int[] pix = new int[w * h];
        bitmap.getPixels(pix, 0, w, 0, 0, w, h);

        int wm = w - 1;
        int hm = h - 1;
        int wh = w * h;
        int div = radius + radius + 1;  // sliding-window width

        // Per-pixel channel sums produced by the horizontal pass.
        int r[] = new int[wh];
        int g[] = new int[wh];
        int b[] = new int[wh];
        int rsum, gsum, bsum, x, y, i, p, yp, yi, yw;
        int vmin[] = new int[Math.max(w, h)];

        // dv[] is a precomputed division table: dv[sum] == sum / divsum,
        // avoiding a divide per pixel per channel.
        int divsum = (div + 1) >> 1;
        divsum *= divsum;
        int dv[] = new int[256 * divsum];
        for (i = 0; i < 256 * divsum; i++) {
            dv[i] = (i / divsum);
        }

        yw = yi = 0;

        // The "stack": one RGB triple per window slot, addressed circularly.
        int[][] stack = new int[div][3];
        int stackpointer;
        int stackstart;
        int[] sir;
        int rbs;
        int r1 = radius + 1;
        // Running sums of the outgoing (left) and incoming (right) halves of
        // the window; the weighted total sum is updated incrementally from them.
        int routsum, goutsum, boutsum;
        int rinsum, ginsum, binsum;

        // Horizontal pass: blur each row into r[]/g[]/b[].
        for (y = 0; y < h; y++) {
            rinsum = ginsum = binsum = routsum = goutsum = boutsum = rsum = gsum = bsum = 0;
            // Prime the window with the row's first pixels (edge-clamped).
            for (i = -radius; i <= radius; i++) {
                p = pix[yi + Math.min(wm, Math.max(i, 0))];
                sir = stack[i + radius];
                sir[0] = (p & 0xff0000) >> 16;
                sir[1] = (p & 0x00ff00) >> 8;
                sir[2] = (p & 0x0000ff);
                rbs = r1 - Math.abs(i);  // triangular weight, max at center
                rsum += sir[0] * rbs;
                gsum += sir[1] * rbs;
                bsum += sir[2] * rbs;
                if (i > 0) {
                    rinsum += sir[0];
                    ginsum += sir[1];
                    binsum += sir[2];
                } else {
                    routsum += sir[0];
                    goutsum += sir[1];
                    boutsum += sir[2];
                }
            }
            stackpointer = radius;

            for (x = 0; x < w; x++) {

                r[yi] = dv[rsum];
                g[yi] = dv[gsum];
                b[yi] = dv[bsum];

                rsum -= routsum;
                gsum -= goutsum;
                bsum -= boutsum;

                // Oldest slot leaves the window...
                stackstart = stackpointer - radius + div;
                sir = stack[stackstart % div];

                routsum -= sir[0];
                goutsum -= sir[1];
                boutsum -= sir[2];

                if (y == 0) {
                    // Clamped index of the incoming pixel; same for every row.
                    vmin[x] = Math.min(x + radius + 1, wm);
                }
                // ...and is overwritten by the newest pixel entering it.
                p = pix[yw + vmin[x]];

                sir[0] = (p & 0xff0000) >> 16;
                sir[1] = (p & 0x00ff00) >> 8;
                sir[2] = (p & 0x0000ff);

                rinsum += sir[0];
                ginsum += sir[1];
                binsum += sir[2];

                rsum += rinsum;
                gsum += ginsum;
                bsum += binsum;

                stackpointer = (stackpointer + 1) % div;
                sir = stack[(stackpointer) % div];

                routsum += sir[0];
                goutsum += sir[1];
                boutsum += sir[2];

                rinsum -= sir[0];
                ginsum -= sir[1];
                binsum -= sir[2];

                yi++;
            }
            yw += w;
        }

        // Vertical pass: blur each column of the horizontal result back into pix[].
        for (x = 0; x < w; x++) {
            rinsum = ginsum = binsum = routsum = goutsum = boutsum = rsum = gsum = bsum = 0;
            yp = -radius * w;
            for (i = -radius; i <= radius; i++) {
                yi = Math.max(0, yp) + x;  // edge-clamped row offset

                sir = stack[i + radius];

                sir[0] = r[yi];
                sir[1] = g[yi];
                sir[2] = b[yi];

                rbs = r1 - Math.abs(i);

                rsum += r[yi] * rbs;
                gsum += g[yi] * rbs;
                bsum += b[yi] * rbs;

                if (i > 0) {
                    rinsum += sir[0];
                    ginsum += sir[1];
                    binsum += sir[2];
                } else {
                    routsum += sir[0];
                    goutsum += sir[1];
                    boutsum += sir[2];
                }

                if (i < hm) {
                    yp += w;
                }
            }
            yi = x;
            stackpointer = radius;
            for (y = 0; y < h; y++) {
                // Preserve alpha channel: ( 0xff000000 & pix[yi] )
                pix[yi] = (0xff000000 & pix[yi]) | (dv[rsum] << 16) | (dv[gsum] << 8) | dv[bsum];

                rsum -= routsum;
                gsum -= goutsum;
                bsum -= boutsum;

                stackstart = stackpointer - radius + div;
                sir = stack[stackstart % div];

                routsum -= sir[0];
                goutsum -= sir[1];
                boutsum -= sir[2];

                if (x == 0) {
                    vmin[y] = Math.min(y + r1, hm) * w;
                }
                p = x + vmin[y];

                sir[0] = r[p];
                sir[1] = g[p];
                sir[2] = b[p];

                rinsum += sir[0];
                ginsum += sir[1];
                binsum += sir[2];

                rsum += rinsum;
                gsum += ginsum;
                bsum += binsum;

                stackpointer = (stackpointer + 1) % div;
                sir = stack[stackpointer];

                routsum += sir[0];
                goutsum += sir[1];
                boutsum += sir[2];

                rinsum -= sir[0];
                ginsum -= sir[1];
                binsum -= sir[2];

                yi += w;
            }
        }

        bitmap.setPixels(pix, 0, w, 0, 0, w, h);

        return (bitmap);
    }

    /** Parameter bag for a blur request. Mutable by design; set fields directly. */
    static class BlurFactor {

        public static final int DEFAULT_RADIUS = 25;
        public static final int DEFAULT_SAMPLING = 1;

        // Output size in pixels.
        public int width;
        public int height;
        // Blur radius; RenderScript path supports at most 25.
        public int radius = DEFAULT_RADIUS;
        // Downsampling factor applied before blurring (1 = none).
        public int sampling = DEFAULT_SAMPLING;
        // Overlay tint composited onto the capture (SRC_ATOP) before blurring.
        public int color = Color.TRANSPARENT;
    }
}
package org.osmdroid.views.drawing;

import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.os.Looper;

import org.osmdroid.tileprovider.ExpirableBitmapDrawable;
import org.osmdroid.tileprovider.MapTileProviderBase;
import org.osmdroid.tileprovider.TileStates;
import org.osmdroid.util.RectL;
import org.osmdroid.views.MapView;
import org.osmdroid.views.Projection;
import org.osmdroid.views.overlay.Overlay;
import org.osmdroid.views.overlay.TilesOverlay;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.List;

/**
 * Create a bitmap in the background from {@link MapView}-like data but without a {@link MapView}
 *
 * @author Fabrice Fontaine
 * @since 6.1.0
 */
public class MapSnapshot implements Runnable {

    /** Notified each time the snapshot reaches {@link Status#CANVAS_OK}. */
    public interface MapSnapshotable {
        void callback(final MapSnapshot pMapSnapshot);
    }

    /** Lifecycle of a snapshot; advances monotonically from NOTHING to CANVAS_OK. */
    public enum Status {
        NOTHING, STARTED, TILES_OK, PAINTING, CANVAS_OK
    }

    /**
     * The INCLUDE_FLAGs let you precise the tiles you accept in your snapshot,
     * depending on their states.
     * For instance, if your flag includes INCLUDE_FLAG_SCALED, that means that you accept
     * scaled tiles in your output.
     * If your flag equals INCLUDE_FLAG_UPTODATE, that means that you accept only up-to-date tiles,
     * and implicitly that you may have to wait, and need background downloads.
     * Cf. {@link ExpirableBitmapDrawable}
     */
    public static final int INCLUDE_FLAG_UPTODATE = 1;
    public static final int INCLUDE_FLAG_EXPIRED = 2;
    public static final int INCLUDE_FLAG_SCALED = 4;
    public static final int INCLUDE_FLAG_NOTFOUND = 8;
    public static final int INCLUDE_FLAGS_ALL =
            INCLUDE_FLAG_UPTODATE + INCLUDE_FLAG_EXPIRED
                    + INCLUDE_FLAG_SCALED + INCLUDE_FLAG_NOTFOUND;

    /**
     * To be used in View-related Overlay's draw methods.
     * Not only are we not able to include View's in the snapshots,
     * but drawing those View's can make the app crash.
     * A solution is to catch an Exception when drawing,
     * and to be lenient when we're not on the UI thread
     */
    public static boolean isUIThread() {
        return Looper.myLooper() == Looper.getMainLooper();
    }

    // Mercator view port captured from the Projection at construction time.
    private final RectL mViewPort = new RectL();
    private final int mIncludeFlags;
    // All of the following are nulled in onDetach(); guard accordingly.
    private Projection mProjection;
    private MapSnapshotHandler mHandler;
    private MapSnapshotable mMapSnapshotable;
    private MapTileProviderBase mTileProvider;
    private TilesOverlay mTilesOverlay;
    private List<Overlay> mOverlays;
    private Status mStatus = Status.NOTHING;
    private Bitmap mBitmap;
    private boolean mIsDetached;

    /** Convenience constructor: snapshots the given MapView's current state. */
    public MapSnapshot(final MapSnapshotable pMapSnapshotable,
                       final int pIncludeFlags,
                       final MapView pMapView) {
        this(pMapSnapshotable, pIncludeFlags,
                pMapView.getTileProvider(),
                pMapView.getOverlays(),
                pMapView.getProjection()
        );
    }

    /**
     * @param pMapSnapshotable callback invoked when the snapshot bitmap is ready
     * @param pIncludeFlags    combination of INCLUDE_FLAG_* stating which tile
     *                         states are acceptable in the output
     * @param pTileProvider    source of tiles; this instance registers a handler
     *                         on it to be notified of async tile completions
     * @param pOverlays        overlays drawn on top of the tiles; may be null
     * @param pProjection      defines the area, zoom and size of the snapshot
     */
    public MapSnapshot(final MapSnapshotable pMapSnapshotable,
                       final int pIncludeFlags,
                       final MapTileProviderBase pTileProvider,
                       final List<Overlay> pOverlays,
                       final Projection pProjection) {
        mMapSnapshotable = pMapSnapshotable;
        mIncludeFlags = pIncludeFlags;
        mTileProvider = pTileProvider;
        mOverlays = pOverlays;
        mProjection = pProjection;
        mProjection.getMercatorViewPort(mViewPort);
        mTilesOverlay = new TilesOverlay(mTileProvider, null);
        mTilesOverlay.setHorizontalWrapEnabled(mProjection.isHorizontalWrapEnabled());
        mTilesOverlay.setVerticalWrapEnabled(mProjection.isVerticalWrapEnabled());
        // Tile completions delivered to mHandler re-trigger refreshASAP().
        mHandler = new MapSnapshotHandler(this);
        mTileProvider.getTileRequestCompleteHandlers().add(mHandler);
    }

    /** Entry point; typically executed on a background thread. */
    @Override
    public void run() {
        mStatus = Status.STARTED;
        refreshASAP();
    }

    public Status getStatus() {
        return mStatus;
    }

    /** The snapshot bitmap; null until {@link Status#CANVAS_OK} is reached. */
    public Bitmap getBitmap() {
        return mBitmap;
    }

    /** Saves the current snapshot bitmap as PNG; returns false on any failure. */
    public boolean save(final File pFile) {
        return save(mBitmap, pFile);
    }

    /**
     * Releases all references and unregisters from the tile provider.
     * After this call the instance must not be used again.
     */
    public void onDetach() {
        mIsDetached = true;
        mProjection = null;
        mTileProvider.getTileRequestCompleteHandlers().remove(mHandler);
        mTileProvider.detach();
        mTileProvider = null;
        mHandler.destroy();
        mHandler = null;
        mMapSnapshotable = null;
        mTilesOverlay = null;
        mOverlays = null;
        mBitmap = null;
    }

    // Renders tiles plus enabled overlays into mBitmap using the projection.
    private void draw() {
        mBitmap = Bitmap.createBitmap(
                mProjection.getWidth(), mProjection.getHeight(), Bitmap.Config.ARGB_8888);
        final Canvas canvas = new Canvas(mBitmap);
        mProjection.save(canvas, true, false);
        mTilesOverlay.drawTiles(canvas, mProjection, mProjection.getZoomLevel(), mViewPort);
        if (mOverlays != null) {
            for (final Overlay overlay : mOverlays) {
                if (overlay != null && overlay.isEnabled()) {
                    overlay.draw(canvas, mProjection);
                }
            }
        }
        mProjection.restore(canvas, false);
    }

    /**
     * Putting the tile in the memory cache by trying to draw them (but on a null Canvas)
     */
    private void refresh() {
        if (!refreshCheckStart()) {
            return;
        }
        final TileStates tileStates = mTilesOverlay.getTileStates();
        do {
            // Null canvas: triggers tile loading/caching without actual drawing.
            mTilesOverlay.drawTiles(null, mProjection, mProjection.getZoomLevel(), mViewPort);
            boolean ready = true;
            // With a partial flag set, refuse to paint while any tile is in a
            // state the caller did not opt into; a later tile completion will
            // re-enter refresh() via the handler.
            if (mIncludeFlags != 0 && mIncludeFlags != INCLUDE_FLAGS_ALL) {
                if (ready && (mIncludeFlags & INCLUDE_FLAG_UPTODATE) == 0
                        && tileStates.getUpToDate() != 0) {
                    ready = false;
                }
                if (ready && (mIncludeFlags & INCLUDE_FLAG_EXPIRED) == 0
                        && tileStates.getExpired() != 0) {
                    ready = false;
                }
                if (ready && (mIncludeFlags & INCLUDE_FLAG_SCALED) == 0
                        && tileStates.getScaled() != 0) {
                    ready = false;
                }
                if (ready && (mIncludeFlags & INCLUDE_FLAG_NOTFOUND) == 0
                        && tileStates.getNotFound() != 0) {
                    ready = false;
                }
            }
            if (ready) {
                if (mStatus == Status.CANVAS_OK || mStatus == Status.PAINTING) {
                    return;
                }
                // Only one caller ever wins refreshCheckFinish(); it paints.
                if (!refreshCheckFinish()) {
                    return;
                }
                mStatus = Status.PAINTING;
                if (mIsDetached) {
                    return;
                }
                draw();
                mStatus = Status.CANVAS_OK;
                // Local copy: mMapSnapshotable may be nulled by onDetach().
                final MapSnapshotable mapSnapshotable = mMapSnapshotable;
                if (mapSnapshotable != null) {
                    mapSnapshotable.callback(MapSnapshot.this);
                }
            }
        } while (refreshCheckEnd());
    }

    // --- refresh handshake ------------------------------------------------
    // mOneMoreTime:      a refresh request is pending.
    // mCurrentlyRunning: a thread is inside the refresh() loop.
    // mAlreadyFinished:  the snapshot was painted (or is being painted); no
    //                    further refresh may run.
    // Together they ensure at most one concurrent refresh loop while never
    // dropping a request that arrives mid-loop.

    // Claims the refresh loop: succeeds only when a request is pending and no
    // other thread is already running it.
    synchronized private boolean refreshCheckStart() {
        if (mIsDetached) {
            return false;
        }
        if (mAlreadyFinished) {
            return false;
        }
        if (!mOneMoreTime) {
            return false;
        }
        if (mCurrentlyRunning) {
            return false;
        }
        mOneMoreTime = false;
        mCurrentlyRunning = true;
        return true;
    }

    // Decides whether the loop must run another iteration (a new request
    // arrived while we were running); otherwise releases the running flag.
    synchronized private boolean refreshCheckEnd() {
        if (mIsDetached) {
            return false;
        }
        if (mAlreadyFinished) {
            return false;
        }
        if (!mOneMoreTime) {
            mCurrentlyRunning = false;
            return false;
        }
        mOneMoreTime = false;
        return true;
    }

    // Atomically claims the right to paint; true for exactly one caller.
    synchronized private boolean refreshCheckFinish() {
        final boolean result = !mAlreadyFinished;
        mAlreadyFinished = true;
        return result;
    }

    // Posts a refresh request; true when no loop is running, i.e. the caller
    // should run refresh() itself.
    synchronized private boolean refreshAgain() {
        mOneMoreTime = true;
        return !mCurrentlyRunning;
    }

    /** Requests a refresh, running it inline unless a loop is already active. */
    public void refreshASAP() {
        if (refreshAgain()) {
            refresh();
        }
    }

    private boolean mOneMoreTime;
    private boolean mCurrentlyRunning;
    private boolean mAlreadyFinished;

    // PNG-encodes pBitmap to pFile; swallows and logs exceptions, returning
    // false instead of throwing.
    private static boolean save(Bitmap pBitmap, File pFile) {
        FileOutputStream out = null;
        try {
            out = new FileOutputStream(pFile.getAbsolutePath());
            // quality parameter is ignored for the lossless PNG format
            pBitmap.compress(Bitmap.CompressFormat.PNG, 100, out);
            return true;
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                if (out != null) {
                    out.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return false;
    }
}
package ibis.io;

import java.io.IOException;
import java.nio.ByteBuffer;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A {@link DataOutputStream} that serializes primitives and primitive arrays
 * directly into a single, fixed-size, caller-supplied byte array.
 *
 * <p>Unlike a buffered stream there is no underlying sink: once the buffer is
 * full, any further write throws an {@link IOException}. {@link #flush()},
 * {@link #finish()} and {@link #close()} are therefore no-ops.</p>
 *
 * <p>Not thread-safe.</p>
 */
public class SingleBufferArrayOutputStream extends DataOutputStream {

    // BUGFIX: was LoggerFactory.getLogger(BufferedArrayOutputStream.class),
    // which attributed this class's log output to the wrong class.
    private static final Logger logger = LoggerFactory
            .getLogger(SingleBufferArrayOutputStream.class);

    private static final boolean DEBUG = IOProperties.DEBUG;

    /** Size of the buffer in which output data is collected. */
    private final int BUF_SIZE;

    /** The buffer in which output data is collected. */
    private byte[] buffer;

    /** Next free position in the buffer. */
    private int index = 0;

    /** Position of the last {@link #resetBytesWritten()}; see {@link #bytesWritten()}. */
    private int offset = 0;

    /** Object used for conversion of primitive types to bytes. */
    private Conversion conversion;

    /**
     * Constructor.
     *
     * @param buffer
     *          the underlying byte buffer; all data is written into it in place
     */
    public SingleBufferArrayOutputStream(byte[] buffer) {
        this.buffer = buffer;
        BUF_SIZE = buffer.length;
        conversion = Conversion.loadConversion(false);
    }

    /** Rewinds the write position to the start of the buffer. */
    public void reset() {
        index = 0;
    }

    /** @return number of bytes written since the last {@link #resetBytesWritten()}. */
    public long bytesWritten() {
        return index - offset;
    }

    /** Makes {@link #bytesWritten()} count from the current position. */
    public void resetBytesWritten() {
        offset = index;
    }

    /**
     * Checks that there is space for <code>bytes</code> more bytes.
     *
     * @param bytes
     *          the space requested
     * @exception IOException
     *          when the request does not fit in the remaining buffer space.
     */
    private void checkFreeSpace(int bytes) throws IOException {
        if (DEBUG && logger.isDebugEnabled()) {
            logger.debug("checkFreeSpace(" + bytes + ") : " + " "
                    + (index + bytes >= BUF_SIZE) + " " + (index) + ")");
        }

        if (index + bytes > BUF_SIZE) {
            throw new IOException("End of buffer reached (" + index + "+"
                    + bytes + " > " + BUF_SIZE + ")");
        }
    }

    public void write(int b) throws IOException {
        writeByte((byte) b);
    }

    public void writeBoolean(boolean value) throws IOException {
        byte b = conversion.boolean2byte(value);
        checkFreeSpace(1);
        buffer[index++] = b;
    }

    public void writeByte(byte value) throws IOException {
        checkFreeSpace(1);
        buffer[index++] = value;
    }

    public void writeChar(char value) throws IOException {
        checkFreeSpace(Constants.SIZEOF_CHAR);
        conversion.char2byte(value, buffer, index);
        index += Constants.SIZEOF_CHAR;
    }

    public void writeShort(short value) throws IOException {
        checkFreeSpace(Constants.SIZEOF_SHORT);
        conversion.short2byte(value, buffer, index);
        index += Constants.SIZEOF_SHORT;
    }

    public void writeInt(int value) throws IOException {
        checkFreeSpace(Constants.SIZEOF_INT);
        conversion.int2byte(value, buffer, index);
        index += Constants.SIZEOF_INT;
    }

    public void writeLong(long value) throws IOException {
        checkFreeSpace(Constants.SIZEOF_LONG);
        conversion.long2byte(value, buffer, index);
        index += Constants.SIZEOF_LONG;
    }

    public void writeFloat(float value) throws IOException {
        checkFreeSpace(Constants.SIZEOF_FLOAT);
        conversion.float2byte(value, buffer, index);
        index += Constants.SIZEOF_FLOAT;
    }

    public void writeDouble(double value) throws IOException {
        checkFreeSpace(Constants.SIZEOF_DOUBLE);
        conversion.double2byte(value, buffer, index);
        index += Constants.SIZEOF_DOUBLE;
    }

    public void write(byte[] b) throws IOException {
        writeArray(b);
    }

    public void write(byte[] b, int off, int len) throws IOException {
        writeArray(b, off, len);
    }

    public void writeArray(boolean[] ref, int off, int len) throws IOException {
        if (DEBUG && logger.isDebugEnabled()) {
            logger.debug("writeArray(boolean[" + off + " ... " + (off + len)
                    + "])");
        }
        final int toWrite = len * Constants.SIZEOF_BOOLEAN;
        checkFreeSpace(toWrite);
        conversion.boolean2byte(ref, off, len, buffer, index);
        index += toWrite;
    }

    public void writeArray(byte[] ref, int off, int len) throws IOException {
        if (DEBUG && logger.isDebugEnabled()) {
            logger.debug("writeArray(byte[" + off + " ... " + (off + len)
                    + "])");
        }
        checkFreeSpace(len);
        System.arraycopy(ref, off, buffer, index, len);
        index += len;
    }

    public void writeArray(char[] ref, int off, int len) throws IOException {
        if (DEBUG && logger.isDebugEnabled()) {
            logger.debug("writeArray(char[" + off + " ... " + (off + len)
                    + "])");
        }
        final int toWrite = len * Constants.SIZEOF_CHAR;
        checkFreeSpace(toWrite);
        conversion.char2byte(ref, off, len, buffer, index);
        index += toWrite;
    }

    public void writeArray(short[] ref, int off, int len) throws IOException {
        if (DEBUG && logger.isDebugEnabled()) {
            logger.debug("writeArray(short[" + off + " ... " + (off + len)
                    + "])");
        }
        final int toWrite = len * Constants.SIZEOF_SHORT;
        checkFreeSpace(toWrite);
        conversion.short2byte(ref, off, len, buffer, index);
        index += toWrite;
    }

    public void writeArray(int[] ref, int off, int len) throws IOException {
        if (DEBUG && logger.isDebugEnabled()) {
            logger.debug("writeArray(int[" + off + " ... " + (off + len)
                    + "])");
        }
        // Consistency: use the Constants.SIZEOF_* family like all scalar writers
        // (was Conversion.INT_SIZE).
        final int toWrite = len * Constants.SIZEOF_INT;
        checkFreeSpace(toWrite);
        conversion.int2byte(ref, off, len, buffer, index);
        index += toWrite;
    }

    public void writeArray(long[] ref, int off, int len) throws IOException {
        if (DEBUG && logger.isDebugEnabled()) {
            logger.debug("writeArray(long[" + off + " ... " + (off + len)
                    + "])");
        }
        // BUGFIX: was len * Conversion.INT_SIZE. long2byte writes SIZEOF_LONG
        // bytes per element (cf. writeLong), so the old code under-reserved
        // space and advanced 'index' by only half the bytes actually written,
        // corrupting subsequent writes and defeating the bounds check.
        final int toWrite = len * Constants.SIZEOF_LONG;
        checkFreeSpace(toWrite);
        conversion.long2byte(ref, off, len, buffer, index);
        index += toWrite;
    }

    public void writeArray(float[] ref, int off, int len) throws IOException {
        if (DEBUG && logger.isDebugEnabled()) {
            logger.debug("writeArray(float[" + off + " ... " + (off + len)
                    + "])");
        }
        // Consistency: use Constants.SIZEOF_FLOAT like writeFloat
        // (was Conversion.FLOAT_SIZE).
        final int toWrite = len * Constants.SIZEOF_FLOAT;
        checkFreeSpace(toWrite);
        conversion.float2byte(ref, off, len, buffer, index);
        index += toWrite;
    }

    public void writeArray(double[] ref, int off, int len) throws IOException {
        if (DEBUG && logger.isDebugEnabled()) {
            logger.debug("writeArray(double[" + off + " ... " + (off + len)
                    + "])");
        }
        // BUGFIX: was len * Conversion.FLOAT_SIZE. double2byte writes
        // SIZEOF_DOUBLE bytes per element (cf. writeDouble); same
        // under-reservation/index-corruption bug as the long[] case.
        final int toWrite = len * Constants.SIZEOF_DOUBLE;
        checkFreeSpace(toWrite);
        conversion.double2byte(ref, off, len, buffer, index);
        index += toWrite;
    }

    /** No-op: there is no underlying stream to flush to. */
    public void flush() throws IOException {
        // empty
    }

    /** No-op: all data is already in the caller's buffer. */
    public void finish() {
        // empty
    }

    public boolean finished() {
        return true;
    }

    /** No-op: the caller owns the buffer; nothing to release. */
    public void close() throws IOException {
        // empty
    }

    /** @return total capacity of the underlying buffer in bytes. */
    public int bufferSize() {
        return BUF_SIZE;
    }

    /** Copies the remaining bytes of {@code value} into the buffer. */
    public void writeByteBuffer(ByteBuffer value) throws IOException {
        int len = value.limit() - value.position();
        checkFreeSpace(len);
        value.get(buffer, index, len);
        index += len;
    }
}
/* * Copyright (C) 2018 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package androidx.media3.exoplayer; import static androidx.media3.test.utils.ExoPlayerTestRunner.AUDIO_FORMAT; import static androidx.media3.test.utils.ExoPlayerTestRunner.VIDEO_FORMAT; import static androidx.media3.test.utils.FakeTimeline.TimelineWindowDefinition.DEFAULT_WINDOW_OFFSET_IN_FIRST_PERIOD_US; import static com.google.common.truth.Truth.assertThat; import static java.util.concurrent.TimeUnit.SECONDS; import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; import static org.robolectric.Shadows.shadowOf; import android.net.Uri; import android.os.Handler; import android.os.Looper; import android.util.Pair; import androidx.media3.common.AdPlaybackState; import androidx.media3.common.C; import androidx.media3.common.MediaItem; import androidx.media3.common.PlaybackParameters; import androidx.media3.common.Player; import androidx.media3.common.Timeline; import androidx.media3.common.TracksInfo; import androidx.media3.common.util.Clock; import androidx.media3.exoplayer.analytics.AnalyticsCollector; import androidx.media3.exoplayer.analytics.DefaultAnalyticsCollector; import androidx.media3.exoplayer.analytics.PlayerId; import androidx.media3.exoplayer.source.MediaSource.MediaPeriodId; import androidx.media3.exoplayer.source.MediaSource.MediaSourceCaller; import androidx.media3.exoplayer.source.SinglePeriodTimeline; import 
androidx.media3.exoplayer.source.ads.ServerSideAdInsertionMediaSource; import androidx.media3.exoplayer.source.ads.SinglePeriodAdTimeline; import androidx.media3.exoplayer.trackselection.ExoTrackSelection; import androidx.media3.exoplayer.trackselection.TrackSelector; import androidx.media3.exoplayer.trackselection.TrackSelectorResult; import androidx.media3.exoplayer.upstream.Allocator; import androidx.media3.test.utils.FakeMediaSource; import androidx.media3.test.utils.FakeShuffleOrder; import androidx.media3.test.utils.FakeTimeline; import androidx.media3.test.utils.FakeTimeline.TimelineWindowDefinition; import androidx.test.core.app.ApplicationProvider; import androidx.test.ext.junit.runners.AndroidJUnit4; import com.google.common.collect.ImmutableList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; /** Unit tests for {@link MediaPeriodQueue}. */ @RunWith(AndroidJUnit4.class) public final class MediaPeriodQueueTest { private static final long CONTENT_DURATION_US = 30 * C.MICROS_PER_SECOND; private static final long AD_DURATION_US = 10 * C.MICROS_PER_SECOND; private static final long FIRST_AD_START_TIME_US = 10 * C.MICROS_PER_SECOND; private static final long SECOND_AD_START_TIME_US = 20 * C.MICROS_PER_SECOND; private static final Timeline CONTENT_TIMELINE = new SinglePeriodTimeline( CONTENT_DURATION_US, /* isSeekable= */ true, /* isDynamic= */ false, /* useLiveConfiguration= */ false, /* manifest= */ null, MediaItem.fromUri(Uri.EMPTY)); private static final Uri AD_URI = Uri.EMPTY; private MediaPeriodQueue mediaPeriodQueue; private AdPlaybackState adPlaybackState; private Object firstPeriodUid; private PlaybackInfo playbackInfo; private RendererCapabilities[] rendererCapabilities; private TrackSelector trackSelector; private Allocator allocator; private MediaSourceList mediaSourceList; private FakeMediaSource fakeMediaSource; @Before 
public void setUp() {
    // Wire the queue to a real analytics collector attached to a real player instance;
    // everything else (list listener, track selector, allocator) is mocked.
    AnalyticsCollector analyticsCollector = new DefaultAnalyticsCollector(Clock.DEFAULT);
    analyticsCollector.setPlayer(
        new ExoPlayer.Builder(ApplicationProvider.getApplicationContext()).build(),
        Looper.getMainLooper());
    mediaPeriodQueue =
        new MediaPeriodQueue(analyticsCollector, new Handler(Looper.getMainLooper()));
    mediaSourceList =
        new MediaSourceList(
            mock(MediaSourceList.MediaSourceListInfoRefreshListener.class),
            analyticsCollector,
            new Handler(Looper.getMainLooper()),
            PlayerId.UNSET);
    // No renderers are needed for these tests.
    rendererCapabilities = new RendererCapabilities[0];
    trackSelector = mock(TrackSelector.class);
    allocator = mock(Allocator.class);
  }

  // Without ads, the first media period info already spans the whole period and window.
  @Test
  public void getNextMediaPeriodInfo_withoutAds_returnsLastMediaPeriodInfo() {
    setupAdTimeline(/* no ad groups */ );
    assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
        /* periodUid= */ firstPeriodUid,
        /* startPositionUs= */ 0,
        /* requestedContentPositionUs= */ C.TIME_UNSET,
        /* endPositionUs= */ C.TIME_UNSET,
        /* durationUs= */ CONTENT_DURATION_US,
        /* isFollowedByTransitionToSameStream= */ false,
        /* isLastInPeriod= */ true,
        /* isLastInWindow= */ true,
        /* nextAdGroupIndex= */ C.INDEX_UNSET);
  }

  // Pre-roll ad at position 0 is reported first, then the full content period follows.
  @Test
  public void getNextMediaPeriodInfo_withPrerollAd_returnsCorrectMediaPeriodInfos() {
    setupAdTimeline(/* adGroupTimesUs...= */ 0);
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    assertNextMediaPeriodInfoIsAd(
        /* adGroupIndex= */ 0,
        AD_DURATION_US,
        /* contentPositionUs= */ C.TIME_UNSET,
        /* isFollowedByTransitionToSameStream= */ false);
    advance();
    assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
        /* periodUid= */ firstPeriodUid,
        /* startPositionUs= */ 0,
        /* requestedContentPositionUs= */ C.TIME_UNSET,
        /* endPositionUs= */ C.TIME_UNSET,
        /* durationUs= */ CONTENT_DURATION_US,
        /* isFollowedByTransitionToSameStream= */ false,
        /* isLastInPeriod= */ true,
        /* isLastInWindow= */ true,
        /* nextAdGroupIndex= */ C.INDEX_UNSET);
  }

  // Two mid-roll ads split the content into three content periods; an unloaded ad group
  // first reports C.TIME_UNSET as its duration, then the real duration once loaded.
  @Test
  public void getNextMediaPeriodInfo_withMidrollAds_returnsCorrectMediaPeriodInfos() {
    setupAdTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US);
    assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
        /* periodUid= */ firstPeriodUid,
        /* startPositionUs= */ 0,
        /* requestedContentPositionUs= */ C.TIME_UNSET,
        /* endPositionUs= */ FIRST_AD_START_TIME_US,
        /* durationUs= */ FIRST_AD_START_TIME_US,
        /* isFollowedByTransitionToSameStream= */ false,
        /* isLastInPeriod= */ false,
        /* isLastInWindow= */ false,
        /* nextAdGroupIndex= */ 0);
    advance();
    // Ad group not loaded yet: duration is unknown.
    assertNextMediaPeriodInfoIsAd(
        /* adGroupIndex= */ 0,
        /* adDurationUs= */ C.TIME_UNSET,
        /* contentPositionUs= */ FIRST_AD_START_TIME_US,
        /* isFollowedByTransitionToSameStream= */ false);
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    assertNextMediaPeriodInfoIsAd(
        /* adGroupIndex= */ 0,
        AD_DURATION_US,
        /* contentPositionUs= */ FIRST_AD_START_TIME_US,
        /* isFollowedByTransitionToSameStream= */ false);
    advance();
    assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
        /* periodUid= */ firstPeriodUid,
        /* startPositionUs= */ FIRST_AD_START_TIME_US,
        /* requestedContentPositionUs= */ FIRST_AD_START_TIME_US,
        /* endPositionUs= */ SECOND_AD_START_TIME_US,
        /* durationUs= */ SECOND_AD_START_TIME_US,
        /* isFollowedByTransitionToSameStream= */ false,
        /* isLastInPeriod= */ false,
        /* isLastInWindow= */ false,
        /* nextAdGroupIndex= */ 1);
    advance();
    setAdGroupLoaded(/* adGroupIndex= */ 1);
    assertNextMediaPeriodInfoIsAd(
        /* adGroupIndex= */ 1,
        AD_DURATION_US,
        /* contentPositionUs= */ SECOND_AD_START_TIME_US,
        /* isFollowedByTransitionToSameStream= */ false);
    advance();
    assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
        /* periodUid= */ firstPeriodUid,
        /* startPositionUs= */ SECOND_AD_START_TIME_US,
        /* requestedContentPositionUs= */ SECOND_AD_START_TIME_US,
        /* endPositionUs= */ C.TIME_UNSET,
        /* durationUs= */ CONTENT_DURATION_US,
        /* isFollowedByTransitionToSameStream= */ false,
        /* isLastInPeriod= */ true,
        /* isLastInWindow= */ true,
        /* nextAdGroupIndex= */ C.INDEX_UNSET);
  }

  // Mid-roll plus post-roll: the post-roll content period ends at C.TIME_END_OF_SOURCE, and
  // after the post-roll ad the final content period starts at CONTENT_DURATION_US - 1.
  @Test
  public void getNextMediaPeriodInfo_withMidrollAndPostroll_returnsCorrectMediaPeriodInfos() {
    setupAdTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, C.TIME_END_OF_SOURCE);
    assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
        /* periodUid= */ firstPeriodUid,
        /* startPositionUs= */ 0,
        /* requestedContentPositionUs= */ C.TIME_UNSET,
        /* endPositionUs= */ FIRST_AD_START_TIME_US,
        /* durationUs= */ FIRST_AD_START_TIME_US,
        /* isFollowedByTransitionToSameStream= */ false,
        /* isLastInPeriod= */ false,
        /* isLastInWindow= */ false,
        /* nextAdGroupIndex= */ 0);
    advance();
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    assertNextMediaPeriodInfoIsAd(
        /* adGroupIndex= */ 0,
        AD_DURATION_US,
        /* contentPositionUs= */ FIRST_AD_START_TIME_US,
        /* isFollowedByTransitionToSameStream= */ false);
    advance();
    assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
        /* periodUid= */ firstPeriodUid,
        /* startPositionUs= */ FIRST_AD_START_TIME_US,
        /* requestedContentPositionUs= */ FIRST_AD_START_TIME_US,
        /* endPositionUs= */ C.TIME_END_OF_SOURCE,
        /* durationUs= */ CONTENT_DURATION_US,
        /* isFollowedByTransitionToSameStream= */ false,
        /* isLastInPeriod= */ false,
        /* isLastInWindow= */ false,
        /* nextAdGroupIndex= */ 1);
    advance();
    setAdGroupLoaded(/* adGroupIndex= */ 1);
    assertNextMediaPeriodInfoIsAd(
        /* adGroupIndex= */ 1,
        AD_DURATION_US,
        /* contentPositionUs= */ CONTENT_DURATION_US,
        /* isFollowedByTransitionToSameStream= */ false);
    advance();
    assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
        /* periodUid= */ firstPeriodUid,
        /* startPositionUs= */ CONTENT_DURATION_US - 1,
        /* requestedContentPositionUs= */ CONTENT_DURATION_US,
        /* endPositionUs= */ C.TIME_UNSET,
        /* durationUs= */ CONTENT_DURATION_US,
        /* isFollowedByTransitionToSameStream= */ false,
        /* isLastInPeriod= */ true,
        /* isLastInWindow= */ true,
        /* nextAdGroupIndex= */ C.INDEX_UNSET);
  }

  // Content resume offsets shift the start position of the content period that follows each
  // ad group (e.g. 2000us after the pre-roll, FIRST_AD_START_TIME_US + 3000 after ad group 1).
  @Test
  public void getNextMediaPeriodInfo_withAdGroupResumeOffsets_returnsCorrectMediaPeriodInfos() {
    adPlaybackState =
        new AdPlaybackState(
                /* adsId= */ new Object(),
                /* adGroupTimesUs...= */ 0,
                FIRST_AD_START_TIME_US,
                C.TIME_END_OF_SOURCE)
            .withContentDurationUs(CONTENT_DURATION_US)
            .withContentResumeOffsetUs(/* adGroupIndex= */ 0, /* contentResumeOffsetUs= */ 2000)
            .withContentResumeOffsetUs(/* adGroupIndex= */ 1, /* contentResumeOffsetUs= */ 3000)
            .withContentResumeOffsetUs(/* adGroupIndex= */ 2, /* contentResumeOffsetUs= */ 4000);
    SinglePeriodAdTimeline adTimeline =
        new SinglePeriodAdTimeline(CONTENT_TIMELINE, adPlaybackState);
    setupTimeline(adTimeline);
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    assertNextMediaPeriodInfoIsAd(
        /* adGroupIndex= */ 0,
        AD_DURATION_US,
        /* contentPositionUs= */ C.TIME_UNSET,
        /* isFollowedByTransitionToSameStream= */ false);
    advance();
    assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
        /* periodUid= */ firstPeriodUid,
        /* startPositionUs= */ 2000,
        /* requestedContentPositionUs= */ C.TIME_UNSET,
        /* endPositionUs= */ FIRST_AD_START_TIME_US,
        /* durationUs= */ FIRST_AD_START_TIME_US,
        /* isFollowedByTransitionToSameStream= */ false,
        /* isLastInPeriod= */ false,
        /* isLastInWindow= */ false,
        /* nextAdGroupIndex= */ 1);
    advance();
    setAdGroupLoaded(/* adGroupIndex= */ 1);
    assertNextMediaPeriodInfoIsAd(
        /* adGroupIndex= */ 1,
        AD_DURATION_US,
        /* contentPositionUs= */ FIRST_AD_START_TIME_US,
        /* isFollowedByTransitionToSameStream= */ false);
    advance();
    assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
        /* periodUid= */ firstPeriodUid,
        /* startPositionUs= */ FIRST_AD_START_TIME_US + 3000,
        /* requestedContentPositionUs= */ FIRST_AD_START_TIME_US,
        /* endPositionUs= */ C.TIME_END_OF_SOURCE,
        /* durationUs= */ CONTENT_DURATION_US,
        /* isFollowedByTransitionToSameStream= */ false,
        /* isLastInPeriod= */ false,
        /* isLastInWindow= */ false,
        /* nextAdGroupIndex= */ 2);
    advance();
    setAdGroupLoaded(/* adGroupIndex= */ 2);
    assertNextMediaPeriodInfoIsAd(
        /* adGroupIndex= */ 2,
        AD_DURATION_US,
        /* contentPositionUs= */ CONTENT_DURATION_US,
        /* isFollowedByTransitionToSameStream= */ false);
    advance();
    assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
        /* periodUid= */ firstPeriodUid,
        /* startPositionUs= */ CONTENT_DURATION_US - 1,
        /* requestedContentPositionUs= */ CONTENT_DURATION_US,
        /* endPositionUs= */ C.TIME_UNSET,
        /* durationUs= */ CONTENT_DURATION_US,
        /* isFollowedByTransitionToSameStream= */ false,
        /* isLastInPeriod= */ true,
        /* isLastInWindow= */ true,
        /* nextAdGroupIndex= */ C.INDEX_UNSET);
  }

  // Server-side inserted ads: transitions between ads and adjacent content are flagged with
  // isFollowedByTransitionToSameStream=true, and the last content period ends at the content
  // duration rather than C.TIME_UNSET.
  @Test
  public void getNextMediaPeriodInfo_withServerSideInsertedAds_returnsCorrectMediaPeriodInfos() {
    adPlaybackState =
        new AdPlaybackState(
                /* adsId= */ new Object(),
                /* adGroupTimesUs...= */ 0,
                FIRST_AD_START_TIME_US,
                SECOND_AD_START_TIME_US)
            .withContentDurationUs(CONTENT_DURATION_US)
            .withIsServerSideInserted(/* adGroupIndex= */ 0, /* isServerSideInserted= */ true)
            .withIsServerSideInserted(/* adGroupIndex= */ 1, /* isServerSideInserted= */ true)
            .withIsServerSideInserted(/* adGroupIndex= */ 2, /* isServerSideInserted= */ true);
    SinglePeriodAdTimeline adTimeline =
        new SinglePeriodAdTimeline(CONTENT_TIMELINE, adPlaybackState);
    setupTimeline(adTimeline);
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    assertNextMediaPeriodInfoIsAd(
        /* adGroupIndex= */ 0,
        AD_DURATION_US,
        /* contentPositionUs= */ C.TIME_UNSET,
        /* isFollowedByTransitionToSameStream= */ true);
    advance();
    assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
        /* periodUid= */ firstPeriodUid,
        /* startPositionUs= */ 0,
        /* requestedContentPositionUs= */ C.TIME_UNSET,
        /* endPositionUs= */ FIRST_AD_START_TIME_US,
        /* durationUs= */ FIRST_AD_START_TIME_US,
        /* isFollowedByTransitionToSameStream= */ true,
        /* isLastInPeriod= */ false,
        /* isLastInWindow= */ false,
        /* nextAdGroupIndex= */ 1);
    advance();
    setAdGroupLoaded(/* adGroupIndex= */ 1);
    assertNextMediaPeriodInfoIsAd(
        /* adGroupIndex= */ 1,
        AD_DURATION_US,
        /* contentPositionUs= */ FIRST_AD_START_TIME_US,
        /* isFollowedByTransitionToSameStream= */ true);
    advance();
    assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
        /* periodUid= */ firstPeriodUid,
        /* startPositionUs= */ FIRST_AD_START_TIME_US,
        /* requestedContentPositionUs= */ FIRST_AD_START_TIME_US,
        /* endPositionUs= */ SECOND_AD_START_TIME_US,
        /* durationUs= */ SECOND_AD_START_TIME_US,
        /* isFollowedByTransitionToSameStream= */ true,
        /* isLastInPeriod= */ false,
        /* isLastInWindow= */ false,
        /* nextAdGroupIndex= */ 2);
    advance();
    setAdGroupLoaded(/* adGroupIndex= */ 2);
    assertNextMediaPeriodInfoIsAd(
        /* adGroupIndex= */ 2,
        AD_DURATION_US,
        /* contentPositionUs= */ SECOND_AD_START_TIME_US,
        /* isFollowedByTransitionToSameStream= */ true);
    advance();
    assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
        /* periodUid= */ firstPeriodUid,
        /* startPositionUs= */ SECOND_AD_START_TIME_US,
        /* requestedContentPositionUs= */ SECOND_AD_START_TIME_US,
        /* endPositionUs= */ CONTENT_DURATION_US,
        /* durationUs= */ CONTENT_DURATION_US,
        /* isFollowedByTransitionToSameStream= */ false,
        /* isLastInPeriod= */ true,
        /* isLastInWindow= */ true,
        /* nextAdGroupIndex= */ C.INDEX_UNSET);
  }

  // A post-roll that fails to load is skipped: the final content period is reported directly.
  @Test
  public void getNextMediaPeriodInfo_withPostrollLoadError_returnsEmptyFinalMediaPeriodInfo() {
    setupAdTimeline(/* adGroupTimesUs...= */ C.TIME_END_OF_SOURCE);
    assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
        /* periodUid= */ firstPeriodUid,
        /* startPositionUs= */ 0,
        /* requestedContentPositionUs= */ C.TIME_UNSET,
        /* endPositionUs= */ C.TIME_END_OF_SOURCE,
        /* durationUs= */ CONTENT_DURATION_US,
        /* isFollowedByTransitionToSameStream= */ false,
        /* isLastInPeriod= */ false,
        /* isLastInWindow= */ false,
        /* nextAdGroupIndex= */ 0);
    advance();
    setAdGroupFailedToLoad(/* adGroupIndex= */ 0);
    assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
        /* periodUid= */ firstPeriodUid,
        /* startPositionUs= */ CONTENT_DURATION_US - 1,
        /* requestedContentPositionUs= */ CONTENT_DURATION_US,
        /* endPositionUs= */ C.TIME_UNSET,
        /* durationUs= */ CONTENT_DURATION_US,
        /* isFollowedByTransitionToSameStream= */ false,
        /* isLastInPeriod= */ true,
        /* isLastInWindow= */ true,
        /* nextAdGroupIndex= */ C.INDEX_UNSET);
  }

  @Test
public void getNextMediaPeriodInfo_withPlayedAdGroups_returnsCorrectMediaPeriodInfos() {
    // Played ad groups are skipped when the queue is cleared and re-resolved from position 0.
    setupAdTimeline(/* adGroupTimesUs...= */ 0, FIRST_AD_START_TIME_US, C.TIME_END_OF_SOURCE);
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    setAdGroupLoaded(/* adGroupIndex= */ 1);
    setAdGroupLoaded(/* adGroupIndex= */ 2);
    assertNextMediaPeriodInfoIsAd(
        /* adGroupIndex= */ 0,
        AD_DURATION_US,
        /* contentPositionUs= */ C.TIME_UNSET,
        /* isFollowedByTransitionToSameStream= */ false);
    setAdGroupPlayed(/* adGroupIndex= */ 0);
    clear();
    assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
        /* periodUid= */ firstPeriodUid,
        /* startPositionUs= */ 0,
        /* requestedContentPositionUs= */ C.TIME_UNSET,
        /* endPositionUs= */ FIRST_AD_START_TIME_US,
        /* durationUs= */ FIRST_AD_START_TIME_US,
        /* isFollowedByTransitionToSameStream= */ false,
        /* isLastInPeriod= */ false,
        /* isLastInWindow= */ false,
        /* nextAdGroupIndex= */ 1);
    setAdGroupPlayed(/* adGroupIndex= */ 1);
    clear();
    assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
        /* periodUid= */ firstPeriodUid,
        /* startPositionUs= */ 0,
        /* requestedContentPositionUs= */ C.TIME_UNSET,
        /* endPositionUs= */ C.TIME_END_OF_SOURCE,
        /* durationUs= */ CONTENT_DURATION_US,
        /* isFollowedByTransitionToSameStream= */ false,
        /* isLastInPeriod= */ false,
        /* isLastInWindow= */ false,
        /* nextAdGroupIndex= */ 2);
    setAdGroupPlayed(/* adGroupIndex= */ 2);
    clear();
    assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
        /* periodUid= */ firstPeriodUid,
        /* startPositionUs= */ 0,
        /* requestedContentPositionUs= */ C.TIME_UNSET,
        /* endPositionUs= */ C.TIME_UNSET,
        /* durationUs= */ CONTENT_DURATION_US,
        /* isFollowedByTransitionToSameStream= */ false,
        /* isLastInPeriod= */ true,
        /* isLastInWindow= */ true,
        /* nextAdGroupIndex= */ C.INDEX_UNSET);
  }

  // In a two-period window the first period includes the window offset in its duration and is
  // last-in-period but not last-in-window; the second period is last in both.
  @Test
  public void getNextMediaPeriodInfo_inMultiPeriodWindow_returnsCorrectMediaPeriodInfos() {
    setupTimeline(
        new FakeTimeline(
            new TimelineWindowDefinition(
                /* periodCount= */ 2,
                /* id= */ new Object(),
                /* isSeekable= */ false,
                /* isDynamic= */ false,
                /* durationUs= */ 2 * CONTENT_DURATION_US)));
    assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
        /* periodUid= */ playbackInfo.timeline.getUidOfPeriod(/* periodIndex= */ 0),
        /* startPositionUs= */ 0,
        /* requestedContentPositionUs= */ C.TIME_UNSET,
        /* endPositionUs= */ C.TIME_UNSET,
        /* durationUs= */ CONTENT_DURATION_US + DEFAULT_WINDOW_OFFSET_IN_FIRST_PERIOD_US,
        /* isFollowedByTransitionToSameStream= */ false,
        /* isLastInPeriod= */ true,
        /* isLastInWindow= */ false,
        /* nextAdGroupIndex= */ C.INDEX_UNSET);
    advance();
    assertGetNextMediaPeriodInfoReturnsContentMediaPeriod(
        /* periodUid= */ playbackInfo.timeline.getUidOfPeriod(/* periodIndex= */ 1),
        /* startPositionUs= */ 0,
        /* requestedContentPositionUs= */ 0,
        /* endPositionUs= */ C.TIME_UNSET,
        /* durationUs= */ CONTENT_DURATION_US,
        /* isFollowedByTransitionToSameStream= */ false,
        /* isLastInPeriod= */ true,
        /* isLastInWindow= */ true,
        /* nextAdGroupIndex= */ C.INDEX_UNSET);
  }

  // Duration change in the playing content, with the renderer read position still before the
  // new duration: the change is handled and later periods are removed.
  @Test
  public void
      updateQueuedPeriods_withDurationChangeInPlayingContent_handlesChangeAndRemovesPeriodsAfterChangedPeriod() {
    setupAdTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US);
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    enqueueNext(); // Content before ad.
    enqueueNext(); // Ad.
    enqueueNext(); // Content after ad.

    // Change position of first ad (= change duration of playing content before first ad).
    updateAdPlaybackStateAndTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US - 2000);
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    // Read position is 1000us before the new (shortened) content duration.
    long maxRendererReadPositionUs =
        MediaPeriodQueue.INITIAL_RENDERER_POSITION_OFFSET_US + FIRST_AD_START_TIME_US - 3000;
    boolean changeHandled =
        mediaPeriodQueue.updateQueuedPeriods(
            playbackInfo.timeline,
            /* rendererPositionUs= */ MediaPeriodQueue.INITIAL_RENDERER_POSITION_OFFSET_US,
            maxRendererReadPositionUs);

    assertThat(changeHandled).isTrue();
    assertThat(getQueueLength()).isEqualTo(1);
    assertThat(mediaPeriodQueue.getPlayingPeriod().info.endPositionUs)
        .isEqualTo(FIRST_AD_START_TIME_US - 2000);
    assertThat(mediaPeriodQueue.getPlayingPeriod().info.durationUs)
        .isEqualTo(FIRST_AD_START_TIME_US - 2000);
  }

  // Same as above, but the renderer has already read past the new duration, so the change
  // cannot be handled seamlessly (changeHandled=false); queued periods are still removed.
  @Test
  public void
      updateQueuedPeriods_withDurationChangeInPlayingContentAfterReadingPosition_doesntHandleChangeAndRemovesPeriodsAfterChangedPeriod() {
    setupAdTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US);
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    enqueueNext(); // Content before ad.
    enqueueNext(); // Ad.
    enqueueNext(); // Content after ad.

    // Change position of first ad (= change duration of playing content before first ad).
    updateAdPlaybackStateAndTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US - 2000);
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    // Read position is 1000us beyond the new (shortened) content duration.
    long maxRendererReadPositionUs =
        MediaPeriodQueue.INITIAL_RENDERER_POSITION_OFFSET_US + FIRST_AD_START_TIME_US - 1000;
    boolean changeHandled =
        mediaPeriodQueue.updateQueuedPeriods(
            playbackInfo.timeline,
            /* rendererPositionUs= */ MediaPeriodQueue.INITIAL_RENDERER_POSITION_OFFSET_US,
            maxRendererReadPositionUs);

    assertThat(changeHandled).isFalse();
    assertThat(getQueueLength()).isEqualTo(1);
    assertThat(mediaPeriodQueue.getPlayingPeriod().info.endPositionUs)
        .isEqualTo(FIRST_AD_START_TIME_US - 2000);
    assertThat(mediaPeriodQueue.getPlayingPeriod().info.durationUs)
        .isEqualTo(FIRST_AD_START_TIME_US - 2000);
  }

  // With a server-side inserted ad, reading past the new duration is tolerated and the change
  // is still handled.
  @Test
  public void
      updateQueuedPeriods_withDurationChangeInPlayingContentAfterReadingPositionInServerSideInsertedAd_handlesChangeAndRemovesPeriodsAfterChangedPeriod() {
    adPlaybackState =
        new AdPlaybackState(/* adsId= */ new Object(), /* adGroupTimes... */ FIRST_AD_START_TIME_US)
            .withIsServerSideInserted(/* adGroupIndex= */ 0, /* isServerSideInserted= */ true);
    SinglePeriodAdTimeline adTimeline =
        new SinglePeriodAdTimeline(CONTENT_TIMELINE, adPlaybackState);
    setupTimeline(adTimeline);
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    enqueueNext(); // Content before ad.
    enqueueNext(); // Ad.
    enqueueNext(); // Content after ad.

    // Change position of first ad (= change duration of playing content before first ad).
    adPlaybackState =
        new AdPlaybackState(
                /* adsId= */ new Object(), /* adGroupTimesUs...= */ FIRST_AD_START_TIME_US - 2000)
            .withIsServerSideInserted(/* adGroupIndex= */ 0, /* isServerSideInserted= */ true);
    updateTimeline();
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    long maxRendererReadPositionUs =
        MediaPeriodQueue.INITIAL_RENDERER_POSITION_OFFSET_US + FIRST_AD_START_TIME_US - 1000;
    boolean changeHandled =
        mediaPeriodQueue.updateQueuedPeriods(
            playbackInfo.timeline,
            /* rendererPositionUs= */ MediaPeriodQueue.INITIAL_RENDERER_POSITION_OFFSET_US,
            maxRendererReadPositionUs);

    assertThat(changeHandled).isTrue();
    assertThat(getQueueLength()).isEqualTo(1);
    assertThat(mediaPeriodQueue.getPlayingPeriod().info.endPositionUs)
        .isEqualTo(FIRST_AD_START_TIME_US - 2000);
    assertThat(mediaPeriodQueue.getPlayingPeriod().info.durationUs)
        .isEqualTo(FIRST_AD_START_TIME_US - 2000);
  }

  // Duration change entirely after the reading period: handled; only the changed and following
  // periods are removed (3 of 4 remain).
  @Test
  public void
      updateQueuedPeriods_withDurationChangeAfterReadingPeriod_handlesChangeAndRemovesPeriodsAfterChangedPeriod() {
    setupAdTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US);
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    setAdGroupLoaded(/* adGroupIndex= */ 1);
    enqueueNext(); // Content before first ad.
    enqueueNext(); // First ad.
    enqueueNext(); // Content between ads.
    enqueueNext(); // Second ad.

    // Change position of second ad (= change duration of content between ads).
    updateAdPlaybackStateAndTimeline(
        /* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US - 1000);
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    setAdGroupLoaded(/* adGroupIndex= */ 1);
    boolean changeHandled =
        mediaPeriodQueue.updateQueuedPeriods(
            playbackInfo.timeline,
            /* rendererPositionUs= */ MediaPeriodQueue.INITIAL_RENDERER_POSITION_OFFSET_US,
            /* maxRendererReadPositionUs= */ MediaPeriodQueue.INITIAL_RENDERER_POSITION_OFFSET_US);

    assertThat(changeHandled).isTrue();
    assertThat(getQueueLength()).isEqualTo(3);
  }

  // Duration change before the reading period (reading has advanced to the second ad): not
  // handled, periods after the change are removed.
  @Test
  public void
      updateQueuedPeriods_withDurationChangeBeforeReadingPeriod_doesntHandleChangeAndRemovesPeriodsAfterChangedPeriod() {
    setupAdTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US);
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    setAdGroupLoaded(/* adGroupIndex= */ 1);
    enqueueNext(); // Content before first ad.
    enqueueNext(); // First ad.
    enqueueNext(); // Content between ads.
    enqueueNext(); // Second ad.
    advanceReading(); // Reading first ad.
    advanceReading(); // Reading content between ads.
    advanceReading(); // Reading second ad.

    // Change position of second ad (= change duration of content between ads).
    updateAdPlaybackStateAndTimeline(
        /* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US - 1000);
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    setAdGroupLoaded(/* adGroupIndex= */ 1);
    long maxRendererReadPositionUs =
        MediaPeriodQueue.INITIAL_RENDERER_POSITION_OFFSET_US + FIRST_AD_START_TIME_US;
    boolean changeHandled =
        mediaPeriodQueue.updateQueuedPeriods(
            playbackInfo.timeline,
            /* rendererPositionUs= */ MediaPeriodQueue.INITIAL_RENDERER_POSITION_OFFSET_US,
            maxRendererReadPositionUs);

    assertThat(changeHandled).isFalse();
    assertThat(getQueueLength()).isEqualTo(3);
  }

  // Duration change in the reading period while the read position is still before the new
  // duration: handled.
  @Test
  public void
      updateQueuedPeriods_withDurationChangeInReadingPeriodAfterReadingPosition_handlesChangeAndRemovesPeriodsAfterChangedPeriod() {
    setupAdTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US);
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    setAdGroupLoaded(/* adGroupIndex= */ 1);
    enqueueNext(); // Content before first ad.
    enqueueNext(); // First ad.
    enqueueNext(); // Content between ads.
    enqueueNext(); // Second ad.
    advanceReading(); // Reading first ad.
    advanceReading(); // Reading content between ads.

    // Change position of second ad (= change duration of content between ads).
    updateAdPlaybackStateAndTimeline(
        /* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US - 1000);
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    setAdGroupLoaded(/* adGroupIndex= */ 1);
    long readingPositionAtStartOfContentBetweenAds =
        MediaPeriodQueue.INITIAL_RENDERER_POSITION_OFFSET_US
            + FIRST_AD_START_TIME_US
            + AD_DURATION_US;
    boolean changeHandled =
        mediaPeriodQueue.updateQueuedPeriods(
            playbackInfo.timeline,
            /* rendererPositionUs= */ MediaPeriodQueue.INITIAL_RENDERER_POSITION_OFFSET_US,
            /* maxRendererReadPositionUs= */ readingPositionAtStartOfContentBetweenAds);

    assertThat(changeHandled).isTrue();
    assertThat(getQueueLength()).isEqualTo(3);
  }

  // Duration change in the reading period after the read position has passed the new duration:
  // not handled.
  @Test
  public void
      updateQueuedPeriods_withDurationChangeInReadingPeriodBeforeReadingPosition_doesntHandleChangeAndRemovesPeriodsAfterChangedPeriod() {
    setupAdTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US);
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    setAdGroupLoaded(/* adGroupIndex= */ 1);
    enqueueNext(); // Content before first ad.
    enqueueNext(); // First ad.
    enqueueNext(); // Content between ads.
    enqueueNext(); // Second ad.
    advanceReading(); // Reading first ad.
    advanceReading(); // Reading content between ads.

    // Change position of second ad (= change duration of content between ads).
    updateAdPlaybackStateAndTimeline(
        /* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US - 1000);
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    setAdGroupLoaded(/* adGroupIndex= */ 1);
    long readingPositionAtEndOfContentBetweenAds =
        MediaPeriodQueue.INITIAL_RENDERER_POSITION_OFFSET_US
            + SECOND_AD_START_TIME_US
            + AD_DURATION_US;
    boolean changeHandled =
        mediaPeriodQueue.updateQueuedPeriods(
            playbackInfo.timeline,
            /* rendererPositionUs= */ MediaPeriodQueue.INITIAL_RENDERER_POSITION_OFFSET_US,
            /* maxRendererReadPositionUs= */ readingPositionAtEndOfContentBetweenAds);

    assertThat(changeHandled).isFalse();
    assertThat(getQueueLength()).isEqualTo(3);
  }

  // Reading period fully read (C.TIME_END_OF_SOURCE): a duration change in it cannot be
  // handled.
  @Test
  public void
      updateQueuedPeriods_withDurationChangeInReadingPeriodReadToEnd_doesntHandleChangeAndRemovesPeriodsAfterChangedPeriod() {
    setupAdTimeline(/* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US);
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    setAdGroupLoaded(/* adGroupIndex= */ 1);
    enqueueNext(); // Content before first ad.
    enqueueNext(); // First ad.
    enqueueNext(); // Content between ads.
    enqueueNext(); // Second ad.
    advanceReading(); // Reading first ad.
    advanceReading(); // Reading content between ads.

    // Change position of second ad (= change duration of content between ads).
    updateAdPlaybackStateAndTimeline(
        /* adGroupTimesUs...= */ FIRST_AD_START_TIME_US, SECOND_AD_START_TIME_US - 1000);
    setAdGroupLoaded(/* adGroupIndex= */ 0);
    setAdGroupLoaded(/* adGroupIndex= */ 1);
    boolean changeHandled =
        mediaPeriodQueue.updateQueuedPeriods(
            playbackInfo.timeline,
            /* rendererPositionUs= */ MediaPeriodQueue.INITIAL_RENDERER_POSITION_OFFSET_US,
            /* maxRendererReadPositionUs= */ C.TIME_END_OF_SOURCE);

    assertThat(changeHandled).isFalse();
    assertThat(getQueueLength()).isEqualTo(3);
  }

  // Seeking just behind an ad position snaps the resolved id back to that (unplayed) ad.
  @Test
  public void
      resolveMediaPeriodIdForAdsAfterPeriodPositionChange_behindAdPositionInSinglePeriodTimeline_resolvesToAd() {
    long adPositionUs = DEFAULT_WINDOW_OFFSET_IN_FIRST_PERIOD_US + 10_000;
    AdPlaybackState adPlaybackState = new AdPlaybackState("adsId", adPositionUs);
    adPlaybackState = adPlaybackState.withAdDurationsUs(/* adGroupIndex= */ 0, 5_000);
    Object windowUid = new Object();
    FakeTimeline timeline =
        new FakeTimeline(
            new TimelineWindowDefinition(
                /* periodCount= */ 1,
                /* id= */ windowUid,
                /* isSeekable= */ true,
                /* isDynamic= */ false,
                TimelineWindowDefinition.DEFAULT_WINDOW_DURATION_US,
                adPlaybackState));

    MediaPeriodId mediaPeriodId =
        mediaPeriodQueue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange(
            timeline, /* periodUid= */ new Pair<>(windowUid, 0), adPositionUs + 1);

    assertThat(mediaPeriodId.adGroupIndex).isEqualTo(0);
    assertThat(mediaPeriodId.adIndexInAdGroup).isEqualTo(0);
    assertThat(mediaPeriodId.nextAdGroupIndex).isEqualTo(-1);
    assertThat(mediaPeriodId.periodUid).isEqualTo(new Pair<>(windowUid, 0));
  }

  // Seeking exactly to the ad position also resolves to the ad.
  @Test
  public void
      resolveMediaPeriodIdForAdsAfterPeriodPositionChange_toAdPositionInSinglePeriodTimeline_resolvesToAd() {
    long adPositionUs = DEFAULT_WINDOW_OFFSET_IN_FIRST_PERIOD_US + 10_000;
    AdPlaybackState adPlaybackState = new AdPlaybackState("adsId", adPositionUs);
    adPlaybackState = adPlaybackState.withAdDurationsUs(/* adGroupIndex= */ 0, 5_000);
    Object windowUid = new Object();
    FakeTimeline timeline =
        new FakeTimeline(
            new TimelineWindowDefinition(
                /* periodCount= */ 1,
                /* id= */ windowUid,
                /* isSeekable= */ true,
                /* isDynamic= */ false,
                TimelineWindowDefinition.DEFAULT_WINDOW_DURATION_US,
                adPlaybackState));

    MediaPeriodId mediaPeriodId =
        mediaPeriodQueue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange(
            timeline, /* periodUid= */ new Pair<>(windowUid, 0), adPositionUs);

    assertThat(mediaPeriodId.periodUid).isEqualTo(new Pair<>(windowUid, 0));
    assertThat(mediaPeriodId.adGroupIndex).isEqualTo(0);
    assertThat(mediaPeriodId.adIndexInAdGroup).isEqualTo(0);
    assertThat(mediaPeriodId.nextAdGroupIndex).isEqualTo(-1);
  }

  // Seeking before the ad position stays in content, with the ad group as the next one.
  @Test
  public void
      resolveMediaPeriodIdForAdsAfterPeriodPositionChange_beforeAdPositionInSinglePeriodTimeline_seekNotAdjusted() {
    long adPositionUs = DEFAULT_WINDOW_OFFSET_IN_FIRST_PERIOD_US + 10_000;
    AdPlaybackState adPlaybackState =
        new AdPlaybackState("adsId", adPositionUs).withAdDurationsUs(/* adGroupIndex= */ 0, 5_000);
    Object windowUid = new Object();
    FakeTimeline timeline =
        new FakeTimeline(
            new TimelineWindowDefinition(
                /* periodCount= */ 1,
                /* id= */ windowUid,
                /* isSeekable= */ true,
                /* isDynamic= */ false,
                TimelineWindowDefinition.DEFAULT_WINDOW_DURATION_US,
                adPlaybackState));

    MediaPeriodId mediaPeriodId =
        mediaPeriodQueue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange(
            timeline, new Pair<>(windowUid, 0), adPositionUs - 1);

    assertThat(mediaPeriodId.periodUid).isEqualTo(new Pair<>(windowUid, 0));
    assertThat(mediaPeriodId.adGroupIndex).isEqualTo(-1);
    assertThat(mediaPeriodId.adIndexInAdGroup).isEqualTo(-1);
    assertThat(mediaPeriodId.nextAdGroupIndex).isEqualTo(0);
  }

  // Multi-period SSAI window: a seek behind unplayed ad periods rolls back to the preceding
  // unplayed ad period. NOTE(review): createMultiPeriodServerSideInsertedTimeline is a helper
  // defined later in this class.
  @Test
  public void
      resolveMediaPeriodIdForAdsAfterPeriodPositionChange_behindAdInMultiPeriodTimeline_rollForward()
          throws InterruptedException {
    Object windowId = new Object();
    Timeline timeline =
        createMultiPeriodServerSideInsertedTimeline(
            windowId,
            /* numberOfPlayedAds= */ 0,
            /* isAdPeriodFlags...= */ true,
            false,
            true,
            true,
            true,
            false);

    MediaPeriodId mediaPeriodId =
        mediaPeriodQueue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange(
            timeline, new Pair<>(windowId, 1), /* positionUs= */ 1);

    assertThat(mediaPeriodId.periodUid).isEqualTo(new Pair<>(windowId, 0));
    assertThat(mediaPeriodId.adGroupIndex).isEqualTo(0);
    assertThat(mediaPeriodId.adIndexInAdGroup).isEqualTo(0);
    assertThat(mediaPeriodId.nextAdGroupIndex).isEqualTo(-1);

    mediaPeriodId =
        mediaPeriodQueue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange(
            timeline, new Pair<>(windowId, 5), /* positionUs= */ 0);

    assertThat(mediaPeriodId.periodUid).isEqualTo(new Pair<>(windowId, 2));
    assertThat(mediaPeriodId.adGroupIndex).isEqualTo(0);
    assertThat(mediaPeriodId.adIndexInAdGroup).isEqualTo(0);
    assertThat(mediaPeriodId.nextAdGroupIndex).isEqualTo(-1);
  }

  // All ads already played: seeks are left where they are.
  @Test
  public void
      resolveMediaPeriodIdForAdsAfterPeriodPositionChange_behindAdInMultiPeriodAllAdsPlayed_seekNotAdjusted()
          throws InterruptedException {
    Object windowId = new Object();
    Timeline timeline =
        createMultiPeriodServerSideInsertedTimeline(
            windowId,
            /* numberOfPlayedAds= */ 4,
            /* isAdPeriodFlags...= */ true,
            false,
            true,
            true,
            true,
            false);

    MediaPeriodId mediaPeriodId =
        mediaPeriodQueue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange(
            timeline, new Pair<>(windowId, 1), /* positionUs= */ 11);

    assertThat(mediaPeriodId.adGroupIndex).isEqualTo(-1);
    assertThat(mediaPeriodId.adIndexInAdGroup).isEqualTo(-1);
    assertThat(mediaPeriodId.nextAdGroupIndex).isEqualTo(-1);
    assertThat(mediaPeriodId.periodUid).isEqualTo(new Pair<>(windowId, 1));

    mediaPeriodId =
        mediaPeriodQueue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange(
            timeline, new Pair<>(windowId, 5), /* positionUs= */ 33);

    assertThat(mediaPeriodId.adGroupIndex).isEqualTo(-1);
    assertThat(mediaPeriodId.adIndexInAdGroup).isEqualTo(-1);
    assertThat(mediaPeriodId.nextAdGroupIndex).isEqualTo(-1);
    assertThat(mediaPeriodId.periodUid).isEqualTo(new Pair<>(windowId, 5));
  }

  // Only the first two ads played: a seek behind the remaining unplayed ads rolls back to the
  // first unplayed ad period (period 3).
  @Test
  public void
      resolveMediaPeriodIdForAdsAfterPeriodPositionChange_behindAdInMultiPeriodFirstTwoAdsPlayed_rollForward()
          throws InterruptedException {
    Object windowId = new Object();
    Timeline timeline =
        createMultiPeriodServerSideInsertedTimeline(
            windowId,
            /* numberOfPlayedAds= */ 2,
            /* isAdPeriodFlags...= */ true,
            false,
            true,
            true,
            true,
            false);

    MediaPeriodId mediaPeriodId =
        mediaPeriodQueue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange(
            timeline, new Pair<>(windowId, 5), /* positionUs= */ 33);

    assertThat(mediaPeriodId.adGroupIndex).isEqualTo(0);
    assertThat(mediaPeriodId.adIndexInAdGroup).isEqualTo(0);
    assertThat(mediaPeriodId.nextAdGroupIndex).isEqualTo(-1);
    assertThat(mediaPeriodId.periodUid).isEqualTo(new Pair<>(windowId, 3));
  }

  // Seeking into content before any ad period: not adjusted.
  @Test
  public void
      resolveMediaPeriodIdForAdsAfterPeriodPositionChange_beforeAdInMultiPeriodTimeline_seekNotAdjusted()
          throws InterruptedException {
    Object windowId = new Object();
    Timeline timeline =
        createMultiPeriodServerSideInsertedTimeline(
            windowId, /* numberOfPlayedAds= */ 0, /* isAdPeriodFlags...= */ false, true);

    MediaPeriodId mediaPeriodId =
        mediaPeriodQueue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange(
            timeline, new Pair<>(windowId, 0), /* positionUs= */ 33);

    assertThat(mediaPeriodId.adGroupIndex).isEqualTo(-1);
    assertThat(mediaPeriodId.adIndexInAdGroup).isEqualTo(-1);
    assertThat(mediaPeriodId.nextAdGroupIndex).isEqualTo(-1);
    assertThat(mediaPeriodId.periodUid).isEqualTo(new Pair<>(windowId, 0));
  }

  // Seeking directly into an unplayed ad period resolves to that ad.
  @Test
  public void
      resolveMediaPeriodIdForAdsAfterPeriodPositionChange_toUnplayedAdInMultiPeriodTimeline_resolvedAsAd()
          throws InterruptedException {
    Object windowId = new Object();
    Timeline timeline =
        createMultiPeriodServerSideInsertedTimeline(
            windowId, /* numberOfPlayedAds= */ 0, /* isAdPeriodFlags...= */ false, true, false);

    MediaPeriodId mediaPeriodId =
        mediaPeriodQueue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange(
            timeline, new Pair<>(windowId, 1), /* positionUs= */ 0);

    assertThat(mediaPeriodId.adGroupIndex).isEqualTo(0);
    assertThat(mediaPeriodId.adIndexInAdGroup).isEqualTo(0);
    assertThat(mediaPeriodId.nextAdGroupIndex).isEqualTo(-1);
    assertThat(mediaPeriodId.periodUid).isEqualTo(new Pair<>(windowId, 1));
  }

  // Seeking into an already played ad period skips to the following content period.
  @Test
  public void
      resolveMediaPeriodIdForAdsAfterPeriodPositionChange_toPlayedAdInMultiPeriodTimeline_skipPlayedAd()
          throws InterruptedException {
    Object windowId = new Object();
    Timeline timeline =
        createMultiPeriodServerSideInsertedTimeline(
            windowId, /* numberOfPlayedAds= */ 1, /* isAdPeriodFlags...= */ false, true, false);

    MediaPeriodId mediaPeriodId =
        mediaPeriodQueue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange(
            timeline, new Pair<>(windowId, 1), /* positionUs= */ 0);

    assertThat(mediaPeriodId.adGroupIndex).isEqualTo(-1);
    assertThat(mediaPeriodId.adIndexInAdGroup).isEqualTo(-1);
    assertThat(mediaPeriodId.nextAdGroupIndex).isEqualTo(-1);
    assertThat(mediaPeriodId.periodUid).isEqualTo(new Pair<>(windowId, 2));
  }

  // Seeking to the start of a window whose pre-roll ad periods were already played skips them.
  @Test
  public void
      resolveMediaPeriodIdForAdsAfterPeriodPositionChange_toStartOfWindowPlayedAdPreroll_skipsPlayedPrerolls()
          throws InterruptedException {
    Object windowId = new Object();
    Timeline timeline =
        createMultiPeriodServerSideInsertedTimeline(
            windowId, /* numberOfPlayedAds= */ 2, /* isAdPeriodFlags...= */ true, true, false);

    MediaPeriodId mediaPeriodId =
        mediaPeriodQueue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange(
            timeline, new Pair<>(windowId, 0), /* positionUs= */ 0);

    assertThat(mediaPeriodId.adGroupIndex).isEqualTo(-1);
    assertThat(mediaPeriodId.adIndexInAdGroup).isEqualTo(-1);
    assertThat(mediaPeriodId.nextAdGroupIndex).isEqualTo(-1);
    assertThat(mediaPeriodId.periodUid).isEqualTo(new Pair<>(windowId, 2));
  }

  // Seeking into played post-roll periods skips all but the last one (period 4).
  @Test
  public void
      resolveMediaPeriodIdForAdsAfterPeriodPositionChange_toPlayedPostrolls_skipsAllButLastPostroll()
          throws InterruptedException {
    Object windowId = new Object();
    Timeline timeline =
        createMultiPeriodServerSideInsertedTimeline(
            windowId,
            /* numberOfPlayedAds= */ 4,
            /* isAdPeriodFlags...= */ false,
            true,
            true,
            true,
            true);

    MediaPeriodId mediaPeriodId =
        mediaPeriodQueue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange(
            timeline, new Pair<>(windowId, 1), /* positionUs= */ 0);

    assertThat(mediaPeriodId.periodUid).isEqualTo(new Pair<>(windowId, 4));
    assertThat(mediaPeriodId.adGroupIndex).isEqualTo(-1);
    assertThat(mediaPeriodId.adIndexInAdGroup).isEqualTo(-1);
    assertThat(mediaPeriodId.nextAdGroupIndex).isEqualTo(-1);
  }

  // A seek behind consecutive content periods that follow an ad period rolls back to that ad.
  @Test
  public void
      resolveMediaPeriodIdForAdsAfterPeriodPositionChange_consecutiveContentPeriods_rollForward()
          throws InterruptedException {
    Object windowId = new Object();
    Timeline timeline =
        createMultiPeriodServerSideInsertedTimeline(
            windowId,
            /* numberOfPlayedAds= */ 0,
            /* isAdPeriodFlags...= */ true,
            false,
            false,
            false);

    MediaPeriodId mediaPeriodId =
        mediaPeriodQueue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange(
            timeline, new Pair<>(windowId, 3), /* positionUs= */ 10_000);

    assertThat(mediaPeriodId.periodUid).isEqualTo(new Pair<>(windowId, 0));
    assertThat(mediaPeriodId.adGroupIndex).isEqualTo(0);
    assertThat(mediaPeriodId.adIndexInAdGroup).isEqualTo(0);
    assertThat(mediaPeriodId.nextAdGroupIndex).isEqualTo(-1);
  }

  // No ad periods at all: the seek is left untouched.
  @Test
  public void
      resolveMediaPeriodIdForAdsAfterPeriodPositionChange_onlyConsecutiveContentPeriods_seekNotAdjusted()
          throws InterruptedException {
    Object windowId = new Object();
    Timeline timeline =
        createMultiPeriodServerSideInsertedTimeline(
            windowId,
            /* numberOfPlayedAds= */ 0,
            /* isAdPeriodFlags...= */ false,
            false,
            false,
            false);

    MediaPeriodId mediaPeriodId =
        mediaPeriodQueue.resolveMediaPeriodIdForAdsAfterPeriodPositionChange(
            timeline, new Pair<>(windowId, 3), /* positionUs= */ 10_000);

    assertThat(mediaPeriodId.periodUid).isEqualTo(new Pair<>(windowId, 3));
    assertThat(mediaPeriodId.adGroupIndex).isEqualTo(-1);
  }

  private void setupAdTimeline(long...
adGroupTimesUs) { adPlaybackState = new AdPlaybackState(/* adsId= */ new Object(), adGroupTimesUs) .withContentDurationUs(CONTENT_DURATION_US); SinglePeriodAdTimeline adTimeline = new SinglePeriodAdTimeline(CONTENT_TIMELINE, adPlaybackState); setupTimeline(adTimeline); } private void setupTimeline(Timeline timeline) { fakeMediaSource = new FakeMediaSource(timeline); MediaSourceList.MediaSourceHolder mediaSourceHolder = new MediaSourceList.MediaSourceHolder(fakeMediaSource, /* useLazyPreparation= */ false); mediaSourceList.setMediaSources( ImmutableList.of(mediaSourceHolder), new FakeShuffleOrder(/* length= */ 1)); mediaSourceHolder.mediaSource.prepareSource( mock(MediaSourceCaller.class), /* mediaTransferListener */ null, PlayerId.UNSET); Timeline playlistTimeline = mediaSourceList.createTimeline(); firstPeriodUid = playlistTimeline.getUidOfPeriod(/* periodIndex= */ 0); playbackInfo = new PlaybackInfo( playlistTimeline, mediaPeriodQueue.resolveMediaPeriodIdForAds( playlistTimeline, firstPeriodUid, /* positionUs= */ 0), /* requestedContentPositionUs= */ C.TIME_UNSET, /* discontinuityStartPositionUs= */ 0, Player.STATE_READY, /* playbackError= */ null, /* isLoading= */ false, /* trackGroups= */ null, /* trackSelectorResult= */ null, /* staticMetadata= */ ImmutableList.of(), /* loadingMediaPeriodId= */ null, /* playWhenReady= */ false, Player.PLAYBACK_SUPPRESSION_REASON_NONE, /* playbackParameters= */ PlaybackParameters.DEFAULT, /* bufferedPositionUs= */ 0, /* totalBufferedDurationUs= */ 0, /* positionUs= */ 0, /* offloadSchedulingEnabled= */ false, /* sleepingForOffload= */ false); } private void advance() { enqueueNext(); if (mediaPeriodQueue.getLoadingPeriod() != mediaPeriodQueue.getPlayingPeriod()) { advancePlaying(); } } private void advancePlaying() { mediaPeriodQueue.advancePlayingPeriod(); } private void advanceReading() { mediaPeriodQueue.advanceReadingPeriod(); } private void enqueueNext() { mediaPeriodQueue.enqueueNextMediaPeriodHolder( 
rendererCapabilities, trackSelector, allocator, mediaSourceList, getNextMediaPeriodInfo(), new TrackSelectorResult( new RendererConfiguration[0], new ExoTrackSelection[0], TracksInfo.EMPTY, /* info= */ null)); } private void clear() { mediaPeriodQueue.clear(); playbackInfo = playbackInfo.copyWithNewPosition( mediaPeriodQueue.resolveMediaPeriodIdForAds( mediaSourceList.createTimeline(), firstPeriodUid, /* positionUs= */ 0), /* positionUs= */ 0, /* requestedContentPositionUs= */ C.TIME_UNSET, /* discontinuityStartPositionUs= */ 0, /* totalBufferedDurationUs= */ 0, /* trackGroups= */ null, /* trackSelectorResult= */ null, /* staticMetadata= */ ImmutableList.of()); } private MediaPeriodInfo getNextMediaPeriodInfo() { return mediaPeriodQueue.getNextMediaPeriodInfo(/* rendererPositionUs= */ 0, playbackInfo); } private void setAdGroupLoaded(int adGroupIndex) { long[][] newDurations = new long[adPlaybackState.adGroupCount][]; for (int i = 0; i < adPlaybackState.adGroupCount; i++) { newDurations[i] = i == adGroupIndex ? new long[] {AD_DURATION_US} : adPlaybackState.getAdGroup(i).durationsUs; } adPlaybackState = adPlaybackState .withAdCount(adGroupIndex, /* adCount= */ 1) .withAdUri(adGroupIndex, /* adIndexInAdGroup= */ 0, AD_URI) .withAdDurationsUs(newDurations); updateTimeline(); } private void setAdGroupPlayed(int adGroupIndex) { for (int i = 0; i < adPlaybackState.getAdGroup(adGroupIndex).count; i++) { adPlaybackState = adPlaybackState.withPlayedAd(adGroupIndex, /* adIndexInAdGroup= */ i); } updateTimeline(); } private void setAdGroupFailedToLoad(int adGroupIndex) { adPlaybackState = adPlaybackState .withAdCount(adGroupIndex, /* adCount= */ 1) .withAdLoadError(adGroupIndex, /* adIndexInAdGroup= */ 0); updateTimeline(); } private void updateAdPlaybackStateAndTimeline(long... 
adGroupTimesUs) { adPlaybackState = new AdPlaybackState(/* adsId= */ new Object(), adGroupTimesUs) .withContentDurationUs(CONTENT_DURATION_US); updateTimeline(); } private void updateTimeline() { SinglePeriodAdTimeline adTimeline = new SinglePeriodAdTimeline(CONTENT_TIMELINE, adPlaybackState); fakeMediaSource.setNewSourceInfo(adTimeline); // Progress the looper so that the source info events have been executed. shadowOf(Looper.getMainLooper()).idle(); playbackInfo = playbackInfo.copyWithTimeline(mediaSourceList.createTimeline()); } private void assertGetNextMediaPeriodInfoReturnsContentMediaPeriod( Object periodUid, long startPositionUs, long requestedContentPositionUs, long endPositionUs, long durationUs, boolean isFollowedByTransitionToSameStream, boolean isLastInPeriod, boolean isLastInWindow, int nextAdGroupIndex) { assertThat(getNextMediaPeriodInfo()) .isEqualTo( new MediaPeriodInfo( new MediaPeriodId(periodUid, /* windowSequenceNumber= */ 0, nextAdGroupIndex), startPositionUs, requestedContentPositionUs, endPositionUs, durationUs, isFollowedByTransitionToSameStream, isLastInPeriod, isLastInWindow, /* isFinal= */ isLastInWindow)); } private void assertNextMediaPeriodInfoIsAd( int adGroupIndex, long adDurationUs, long contentPositionUs, boolean isFollowedByTransitionToSameStream) { assertThat(getNextMediaPeriodInfo()) .isEqualTo( new MediaPeriodInfo( new MediaPeriodId( firstPeriodUid, adGroupIndex, /* adIndexInAdGroup= */ 0, /* windowSequenceNumber= */ 0), /* startPositionUs= */ 0, contentPositionUs, /* endPositionUs= */ C.TIME_UNSET, adDurationUs, isFollowedByTransitionToSameStream, /* isLastInTimelinePeriod= */ false, /* isLastInTimelineWindow= */ false, /* isFinal= */ false)); } private int getQueueLength() { int length = 0; MediaPeriodHolder periodHolder = mediaPeriodQueue.getPlayingPeriod(); while (periodHolder != null) { length++; periodHolder = periodHolder.getNext(); } return length; } private static Timeline createMultiPeriodServerSideInsertedTimeline( 
Object windowId, int numberOfPlayedAds, boolean... isAdPeriodFlags) throws InterruptedException { FakeTimeline timeline = FakeTimeline.createMultiPeriodAdTimeline(windowId, numberOfPlayedAds, isAdPeriodFlags); ServerSideAdInsertionMediaSource serverSideAdInsertionMediaSource = new ServerSideAdInsertionMediaSource( new FakeMediaSource(timeline, VIDEO_FORMAT, AUDIO_FORMAT), contentTimeline -> false); serverSideAdInsertionMediaSource.setAdPlaybackStates( timeline.getAdPlaybackStates(/* windowIndex= */ 0)); AtomicReference<Timeline> serverSideAdInsertionTimelineRef = new AtomicReference<>(); CountDownLatch countDownLatch = new CountDownLatch(/* count= */ 1); serverSideAdInsertionMediaSource.prepareSource( (source, serverSideInsertedAdTimeline) -> { serverSideAdInsertionTimelineRef.set(serverSideInsertedAdTimeline); countDownLatch.countDown(); }, /* mediaTransferListener= */ null, new PlayerId()); if (!countDownLatch.await(/* timeout= */ 2, SECONDS)) { fail(); } return serverSideAdInsertionTimelineRef.get(); } }
/**
 * Copyright 2014 LinkedIn Corp. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this
 * file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 */
package com.linkedin.multitenant.main;

import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.log4j.Logger;

import com.linkedin.multitenant.exporter.ConsoleExporter;
import com.linkedin.multitenant.exporter.DataExporter;
import com.linkedin.multitenant.profiler.Profiler;
import com.linkedin.multitenant.xml.XmlJob;
import com.linkedin.multitenant.xml.XmlParser;
import com.linkedin.multitenant.xml.XmlWorkPlan;

/**
 * Entry point of a (possibly distributed) benchmark experiment.
 * <p>
 * One machine acts as the master: it reads the XML work plan from disk, ships it to
 * every slave over a TCP socket, runs its own share of the worker threads, then
 * collects per-machine profiler results back over the same sockets and exports the
 * combined result. A machine started with {@code -slave} instead listens for the
 * master, receives its id / machine count / mode / work plan, runs its worker
 * threads, and sends its results back.
 */
public class RunExperiment
{
  /** Execution mode: LOAD inserts rows into the database; RUN executes the workload. */
  public enum Mode
  {
    LOAD,
    RUN
  }

  /** Host name and TCP port identifying one slave machine. */
  private static class SlavePair
  {
    public String host;
    public Integer port;
  }

  //Property flags (looked up in the XML work-plan / job property maps)
  public static final String FLAG_WORK_HOST = "work.host";
  public static final String FLAG_WORK_PORT = "work.port";
  public static final String FLAG_WORK_EXPORTER_CLASS = "work.exporterClass";
  public static final String FLAG_WORK_STATUS_PERIOD = "work.status.period";
  public static final String FLAG_JOB_NAME = "job.name";
  public static final String FLAG_JOB_ROW = "job.rowCount";

  //command line flags
  public static final String CMD_SLAVE = "slave";
  public static final String CMD_PLAN = "plan";
  public static final String CMD_SLAVEDATA = "slaveData";
  public static final String CMD_LOAD = "load";
  public static final String CMD_WAIT = "wait";

  //default port slave connections
  public static final int SD_DEFAULT_PORT = 12981;

  private static final Logger _LOG = Logger.getLogger(RunExperiment.class);

  /**
   * Processes command line options.<br>
   * Allowed commands are:<br>
   * -slave=PORT: This machine is a slave machine.<br>
   * -plan=PATH: Workload plan file is given at location PATH
   * -slaveData=PATH: Slave information file is given at location PATH
   * -wait=SECONDS: One time waiting before thread execution
   * -load: Insert data to database. If this is not specified, then run workload.
   * @param args Command line options
   * @return If input is valid, then returns a map instance containing parameters.<br>
   *         Otherwise returns null.
   */
  private static Map<String, String> processCommandLine(String[] args)
  {
    Map<String, String> result = new HashMap<String, String>();

    for(int a = 0; a<args.length; a++)
    {
      if(args[a].startsWith("-" + CMD_SLAVEDATA))
      {
        String parts[] = args[a].split("=");
        if(parts.length == 1)
        {
          _LOG.error("file is not specified for slave data");
          return null;
        }
        // NOTE(review): "> 3" lets a value containing one '=' through and
        // silently truncates it to parts[1]; "> 2" was probably intended.
        // Kept as-is to preserve accepted inputs.
        else if(parts.length > 3)
        {
          _LOG.error("multiple = character for slave data");
          return null;
        }
        else
        {
          result.put(CMD_SLAVEDATA, parts[1]);
          _LOG.info("Read from console: " + CMD_SLAVEDATA + " " + parts[1]);
        }
      }
      else if(args[a].startsWith("-" + CMD_WAIT))
      {
        String parts[] = args[a].split("=");
        if(parts.length == 1)
        {
          _LOG.error("waiting time is not specified");
          return null;
        }
        else
        {
          result.put(CMD_WAIT, parts[1]);
          _LOG.info("Read from console: " + CMD_WAIT + " " + parts[1]);
        }
      }
      else if(args[a].startsWith("-" + CMD_PLAN))
      {
        String parts[] = args[a].split("=");
        if(parts.length == 1)
        {
          _LOG.error("file is not specified for work plan");
          return null;
        }
        else if(parts.length > 3)
        {
          _LOG.error("multiple = character for work plan");
          return null;
        }
        else
        {
          result.put(CMD_PLAN, parts[1]);
          _LOG.info("Read from console: " + CMD_PLAN + " " + parts[1]);
        }
      }
      else if(args[a].startsWith("-" + CMD_SLAVE))
      {
        String parts[] = args[a].split("=");
        String port = null;
        if(parts.length == 1)
        {
          _LOG.warn("port number is set to default, " + SD_DEFAULT_PORT);
          port = String.valueOf(SD_DEFAULT_PORT);
        }
        else if(parts.length == 2)
        {
          port = parts[1];
        }
        else
        {
          _LOG.error("Slave port number is not assigned correctly");
          return null;
        }

        result.put(CMD_SLAVE, port);
        _LOG.info("Read from console: " + CMD_SLAVE + " " + port);
      }
      else if(args[a].startsWith("-" + CMD_LOAD))
      {
        result.put(CMD_LOAD, "");
        _LOG.info("Read from console: " + CMD_LOAD);
      }
      else
      {
        _LOG.error("Unknown option: " + args[a]);
        return null;
      }
    }

    return result;
  }

  /**
   * Processes slave data file to get host name and port number for each slave.
   * Each line is either {@code HOST:PORT} or just {@code HOST} (default port used).
   * @param filename Path to the slave data file.
   * @return If input is valid, then returns the mapping.<br>
   *         Otherwise returns null.
   * @throws Exception on I/O errors or malformed port numbers.
   */
  private static List<SlavePair> processSlaveData(String filename) throws Exception
  {
    List<SlavePair> result = new ArrayList<SlavePair>();

    BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(filename)));
    // try/finally so the reader is also closed when readLine/parseInt throws
    // (the original leaked it on the exception path).
    try
    {
      String line = reader.readLine();
      while(line != null)
      {
        String parsed[] = line.split(":");
        if(parsed.length == 2)
        {
          String host = parsed[0];
          Integer port = Integer.parseInt(parsed[1]);

          SlavePair newPair = new SlavePair();
          newPair.host = host;
          newPair.port = port;
          result.add(newPair);
          _LOG.info("Slave info: " + host + " : " + port);
        }
        else if(parsed.length == 1)
        {
          String host = parsed[0];
          // Integer.valueOf instead of the deprecated new Integer(...)
          Integer port = Integer.valueOf(SD_DEFAULT_PORT);

          SlavePair newPair = new SlavePair();
          newPair.host = host;
          newPair.port = port;
          result.add(newPair);
          _LOG.info("Slave info: " + host + " : " + SD_DEFAULT_PORT);
        }
        else
        {
          _LOG.error("Slave data should be HOST:PORT");
          return null;
        }

        line = reader.readLine();
      }
    }
    finally
    {
      reader.close();
    }

    return result;
  }

  public static void main(String[] args) throws Exception
  {
    //read command line options
    Map<String, String> cmdOptions = processCommandLine(args);
    if(cmdOptions == null)
    {
      _LOG.fatal("Cannot process command line. Closing.");
      return;
    }

    //check waiting time
    if(cmdOptions.containsKey(CMD_WAIT))
    {
      int waitTimeSeconds = Integer.parseInt(cmdOptions.get(CMD_WAIT));
      if(waitTimeSeconds > 0)
      {
        _LOG.warn("Sleeping for " + waitTimeSeconds + " seconds");
        // 1000L avoids int overflow for very large wait values
        Thread.sleep(waitTimeSeconds * 1000L);
        _LOG.warn("Woke up from the sleep. Now starting proxy.");
      }
    }

    //some variables
    boolean isMaster = !(cmdOptions.containsKey(CMD_SLAVE));
    List<SlavePair> slaveData = null;
    List<Socket> sockList = new ArrayList<Socket>();
    byte workPlanData[] = null;
    int machineId;
    int machineCount = 0;
    Mode mode;
    if(cmdOptions.containsKey(CMD_LOAD))
      mode = Mode.LOAD;
    else
      mode = Mode.RUN;

    //read slave information
    if(isMaster)
    {
      String slaveDataFilename = cmdOptions.get(CMD_SLAVEDATA);
      if(slaveDataFilename != null)
      {
        slaveData = processSlaveData(slaveDataFilename);
        if(slaveData == null)
        {
          _LOG.fatal("Cannot process slave data file");
          return;
        }
      }
      else
      {
        //master-only run: no slaves
        slaveData = new ArrayList<SlavePair>();
      }

      machineCount = slaveData.size() + 1;
    }

    //exchange work plan
    if(isMaster)
    {
      //if I am the server, then send work plan to all slave machines if any
      _LOG.info("This machine is master");
      machineId = 0;

      //read config data to byte array
      String planFilename = cmdOptions.get(CMD_PLAN);
      if(planFilename == null)
      {
        _LOG.fatal("Plan file is not specified");
        return;
      }
      else
      {
        File f = new File(planFilename);
        // readFully guarantees the whole plan is read; the previous
        // FileInputStream.read(byte[]) call could legally return before
        // filling the buffer, truncating the work plan.
        DataInputStream planIn = new DataInputStream(new FileInputStream(f));
        try
        {
          workPlanData = new byte[(int) f.length()];
          planIn.readFully(workPlanData);
        }
        finally
        {
          planIn.close();
        }
      }

      for(int a = 0; a<slaveData.size(); a++)
      {
        int clientId = a+1;
        String host = slaveData.get(a).host;
        int port = slaveData.get(a).port.intValue();

        Socket newSock = new Socket(host, port);
        sockList.add(newSock);

        //send this client's ID
        DataOutputStream outStr = new DataOutputStream(newSock.getOutputStream());
        outStr.writeInt(clientId);
        _LOG.debug("Sent ID to client " + clientId);

        //send number of machines
        outStr.writeInt(machineCount);
        _LOG.debug("Sent machine count to client " + clientId);

        //send mode
        outStr.writeUTF(mode.name());
        _LOG.debug("Sent mode to client " + clientId);

        //send work plan data
        outStr.writeInt(workPlanData.length);
        outStr.write(workPlanData);
        _LOG.debug("Sent work plan data to client " + clientId);
      }
    }
    else
    {
      //if I am a slave, then listen to the master for work plan.
      _LOG.info("This machine is slave");

      //listen from the specified port
      int port = Integer.parseInt(cmdOptions.get(CMD_SLAVE));
      ServerSocket listener = new ServerSocket(port);
      Socket serverSock = listener.accept();
      sockList.add(serverSock);
      listener.close();

      //get my Id from the master
      DataInputStream inStr = new DataInputStream(serverSock.getInputStream());
      machineId = inStr.readInt();
      _LOG.info("Received machine ID from master, which is " + machineId);

      //get number of machines from the master
      machineCount = inStr.readInt();
      _LOG.info("Received number of machines from master, which is " + machineCount);

      //get mode from the master
      mode = Mode.valueOf(inStr.readUTF());
      _LOG.info("Received mode from master, which is " + mode);

      //get work plan data from the master
      int workPlanSize = inStr.readInt();
      workPlanData = new byte[workPlanSize];
      inStr.readFully(workPlanData);
      _LOG.info("Received work plan data from master");
    }

    //read config
    _LOG.info("size of workPlanData: " + workPlanData.length);
    XmlWorkPlan xmlWork = XmlParser.parseWorkPlan(workPlanData);
    _LOG.info("Read work plan:");
    _LOG.info(xmlWork.toString());

    //create worker threads; thread ids are globally unique across machines:
    //threadId = machineId * threadCount + local index
    List<WorkerThread> threadList = new ArrayList<WorkerThread>();
    List<XmlJob> jobList = xmlWork.getJobList();
    for(int a = 0; a<jobList.size(); a++)
    {
      XmlJob xmlCurrentJob = jobList.get(a);
      String jobName = getParamStr(xmlCurrentJob.getProperties(), FLAG_JOB_NAME);
      if(jobName == null)
      {
        _LOG.fatal("Job name is missing. Closing.");
        return;
      }

      int threadCount = getParamInt(xmlCurrentJob.getProperties(), WorkerThread.FLAG_JOB_THREADS);
      if(threadCount == -1)
      {
        _LOG.fatal("ThreadCount is missing for job " + jobName + ". Closing.");
        return;
      }
      else
      {
        _LOG.debug("Number of threads for job " + jobName + " is " + threadCount);
      }

      int numberOfWorkers = machineCount * threadCount;
      for(int b = 0; b<threadCount; b++)
      {
        int threadId = (machineId * threadCount) + b;
        WorkerThread newThr = new WorkerThread(mode, threadId, numberOfWorkers, xmlWork, xmlCurrentJob);
        threadList.add(newThr);
        _LOG.debug("Added thread: " + newThr.getIdentifier());
      }
    }
    _LOG.debug("Number of total threads: " + threadList.size());

    //create status thread
    int statusPeriod = getParamInt(xmlWork.getProperties(), FLAG_WORK_STATUS_PERIOD);
    if(statusPeriod == -1)
      statusPeriod = 10;
    StatusThread statThread = new StatusThread(threadList, statusPeriod);

    //barrier to sync execution time: master sends a token to every slave;
    //slaves block until they receive it
    if(isMaster)
    {
      for(int a = 0; a<sockList.size(); a++)
      {
        Socket slaveSock = sockList.get(a);
        DataOutputStream out = new DataOutputStream(slaveSock.getOutputStream());
        out.writeInt(1);
      }
    }
    else
    {
      Socket masterSock = sockList.get(0);
      DataInputStream in = new DataInputStream(masterSock.getInputStream());
      in.readInt();
    }

    //start all threads
    for(int a = 0; a<threadList.size(); a++)
      threadList.get(a).start();
    statThread.start();
    _LOG.info("Started worker threads");

    //join all threads
    for(int a = 0; a<threadList.size(); a++)
      threadList.get(a).join();
    _LOG.info("Joined worker threads");

    //stop status thread
    statThread.clear();
    statThread.join();
    _LOG.info("Joined status thread");

    //join thread-wide results to get machine-wide result
    _LOG.info("Combining thread-wide results");
    Map<String, Profiler> profilerMap = new HashMap<String, Profiler>();
    Map<String, Long> optMap = new HashMap<String, Long>();
    Map<String, Long> sleepMap = new HashMap<String, Long>();
    for(int a = 0; a<threadList.size(); a++)
    {
      String jobName = threadList.get(a).getJobName();
      long jobOpt = (long) threadList.get(a).getOptSucceeded();
      long jobSleep = threadList.get(a).getSleepTime();
      Profiler p = threadList.get(a).getProfiler();

      if(profilerMap.containsKey(jobName))
      {
        Profiler prevProf = profilerMap.get(jobName);
        prevProf.add(p);
        profilerMap.put(jobName, prevProf);

        long prevOpt = optMap.get(jobName).longValue();
        optMap.put(jobName, prevOpt + jobOpt);

        long prevSleep = sleepMap.get(jobName).longValue();
        sleepMap.put(jobName, prevSleep + jobSleep);
      }
      else
      {
        profilerMap.put(jobName, p);
        optMap.put(jobName, jobOpt);
        sleepMap.put(jobName, jobSleep);
      }
    }
    _LOG.info("Combined thread-wide results");

    Iterator<String> itrJob = optMap.keySet().iterator();
    while(itrJob.hasNext())
    {
      String curJob = itrJob.next();
      _LOG.info("job=" + curJob + " opt=" + optMap.get(curJob) + " sleep=" + sleepMap.get(curJob));
    }

    //combine machine-wide results
    if(isMaster)
    {
      //if i am server, get machine-wide results from other machines
      for(int a = 0; a<sockList.size(); a++)
      {
        Socket clientSock = sockList.get(a);
        DataOutputStream outStr = new DataOutputStream(clientSock.getOutputStream());
        DataInputStream inStr = new DataInputStream(clientSock.getInputStream());

        //handshake token telling the slave to start sending
        outStr.writeInt(3);

        //get machine-wide results
        int whoIs = inStr.readInt();
        _LOG.info("Got machine id: " + whoIs);

        int size = inStr.readInt();
        _LOG.info("Got number of jobs: " + size);

        for(int b = 0; b<size; b++)
        {
          String jobName = inStr.readUTF();
          _LOG.info("Got job name: " + jobName);

          int dataSize = inStr.readInt();
          _LOG.info("Got profiler byte length: " + dataSize);

          byte data[] = new byte[dataSize];
          inStr.readFully(data);
          _LOG.info("Got profiler data");

          Profiler newProf = new Profiler(data);
          if(profilerMap.containsKey(jobName))
          {
            Profiler oldProf = profilerMap.get(jobName);
            oldProf.add(newProf);
            profilerMap.put(jobName, oldProf);
          }
          else
          {
            profilerMap.put(jobName, newProf);
          }
        }

        //final token releasing the slave
        outStr.writeInt(3);
        _LOG.info("Finished getting machine-wide results from machine-" + whoIs);

        clientSock.close();
        _LOG.info("Socket to machine " + whoIs + " is closed");
      }
    }
    else
    {
      //else, send my machine-wide result to the server
      Socket servSock = sockList.get(0);

      //send machine-wide results
      DataOutputStream out = new DataOutputStream(servSock.getOutputStream());
      DataInputStream in = new DataInputStream(servSock.getInputStream());
      in.readInt();

      //write size of profilerMap
      out.writeInt(machineId);
      _LOG.info("Sent machine id: " + machineId);

      out.writeInt(profilerMap.size());
      _LOG.info("Sent number of jobs: " + profilerMap.size());

      Iterator<String> itr = profilerMap.keySet().iterator();
      while(itr.hasNext())
      {
        String jobName = itr.next();
        byte data[] = profilerMap.get(jobName).toByteArray();

        out.writeUTF(jobName);
        _LOG.info("Sent job name: " + jobName);

        out.writeInt(data.length);
        _LOG.info("Sent profiler byte length " + data.length);

        out.write(data);
        _LOG.info("Sent profiler data");
      }

      in.readInt();
      _LOG.info("Socket to the master is closed");
    }

    //only the master exports the combined result
    if(isMaster)
    {
      DataExporter exp = null;
      ClassLoader classLoader = RunExperiment.class.getClassLoader();
      try
      {
        String exporterClass = getParamStr(xmlWork.getProperties(), FLAG_WORK_EXPORTER_CLASS);
        if(exporterClass == null)
        {
          _LOG.warn("Exporter class is changed to com.linkedin.multitenant.exporter.ConsoleExporter by default");
          exporterClass = "com.linkedin.multitenant.exporter.ConsoleExporter";
        }

        @SuppressWarnings("rawtypes")
        Class expClass = classLoader.loadClass(exporterClass);
        // getDeclaredConstructor().newInstance() instead of the deprecated
        // Class.newInstance() (which swallows constructor checked exceptions).
        exp = (DataExporter) expClass.getDeclaredConstructor().newInstance();
      }
      catch(Exception e)
      {
        _LOG.error("Error loading exporter", e);
        exp = new ConsoleExporter();
      }

      exp.init(xmlWork.getProperties(), profilerMap);
      exp.export();
    }

    _LOG.info("Closing...");
  }

  /**
   * Returns the named property, or null when absent.
   * @param properties Property map
   * @param propertyName Key to look up
   * @return The value, or null.
   */
  private static String getParamStr(Map<String, String> properties, String propertyName)
  {
    return properties.get(propertyName);
  }

  /**
   * Returns the named property parsed as an int, or -1 when absent.
   * @param properties Property map
   * @param propertyName Key to look up
   * @return Parsed value, or -1.
   */
  private static int getParamInt(Map<String, String> properties, String propertyName)
  {
    String val = properties.get(propertyName);
    if(val == null)
      return -1;
    else
      return Integer.parseInt(val);
  }
}
/** * Copyright (c) 2013-2015, jcabi.com * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: 1) Redistributions of source code must retain the above * copyright notice, this list of conditions and the following * disclaimer. 2) Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. 3) Neither the name of the jcabi.com nor * the names of its contributors may be used to endorse or promote * products derived from this software without specific prior written * permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT * NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL * THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package com.jcabi.github.mock;

import com.jcabi.aspects.Immutable;
import com.jcabi.aspects.Loggable;
import com.jcabi.github.Assignees;
import com.jcabi.github.Branches;
import com.jcabi.github.Collaborators;
import com.jcabi.github.Contents;
import com.jcabi.github.Coordinates;
import com.jcabi.github.DeployKeys;
import com.jcabi.github.Forks;
import com.jcabi.github.Git;
import com.jcabi.github.Github;
import com.jcabi.github.Hooks;
import com.jcabi.github.IssueEvents;
import com.jcabi.github.Issues;
import com.jcabi.github.Labels;
import com.jcabi.github.Language;
import com.jcabi.github.Milestones;
import com.jcabi.github.Notifications;
import com.jcabi.github.Pulls;
import com.jcabi.github.Releases;
import com.jcabi.github.Repo;
import com.jcabi.github.RepoCommits;
import com.jcabi.github.RtLanguage;
import com.jcabi.github.Stars;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.json.JsonObject;
import javax.validation.constraints.NotNull;
import lombok.EqualsAndHashCode;
import lombok.ToString;

/**
 * Mock Github repo.
 *
 * <p>Every accessor delegates to the matching {@code Mk*} collaborator backed
 * by the shared {@link MkStorage}. Because the {@link Repo} interface methods
 * do not declare {@link IOException}, storage failures raised while building a
 * collaborator are rethrown as {@link IllegalStateException}.
 *
 * @author Yegor Bugayenko (yegor@tpc2.com)
 * @version $Id$
 * @since 0.5
 * @checkstyle ClassDataAbstractionCouplingCheck (500 lines)
 * @checkstyle ClassFanOutComplexity (500 lines)
 * @todo #1061 Fix code to avoid CouplingBetweenObjects
 */
@Immutable
@Loggable(Loggable.DEBUG)
@ToString
@EqualsAndHashCode(of = {"storage", "self", "coords" })
@SuppressWarnings({
    "PMD.TooManyMethods",
    "PMD.ExcessiveImports",
    "PMD.CouplingBetweenObjects"
})
final class MkRepo implements Repo {
    /**
     * Storage.
     */
    private final transient MkStorage storage;
    /**
     * Login of the user logged in.
     */
    private final transient String self;
    /**
     * Repo coordinates.
     */
    private final transient Coordinates coords;

    /**
     * Public ctor.
     * @param stg Storage
     * @param login User to login
     * @param repo Repo name
     */
    MkRepo(
        @NotNull(message = "stg can't be NULL") final MkStorage stg,
        @NotNull(message = "login can't be NULL") final String login,
        @NotNull(message = "repo can't be NULL") final Coordinates repo
    ) {
        this.storage = stg;
        this.self = login;
        this.coords = repo;
    }

    @Override
    @NotNull(message = "github is never NULL")
    public Github github() {
        return new MkGithub(this.storage, this.self);
    }

    @Override
    @NotNull(message = "coordinates is never NULL")
    public Coordinates coordinates() {
        return this.coords;
    }

    @Override
    @NotNull(message = "issue is never NULL")
    public Issues issues() {
        // IOException from storage access is wrapped because Repo#issues()
        // does not declare it; same pattern in the delegating methods below.
        try {
            return new MkIssues(this.storage, this.self, this.coords);
        } catch (final IOException ex) {
            throw new IllegalStateException(ex);
        }
    }

    @Override
    @NotNull(message = "milestones is never NULL")
    public Milestones milestones() {
        try {
            return new MkMilestones(this.storage, this.self, this.coords);
        } catch (final IOException ex) {
            throw new IllegalStateException(ex);
        }
    }

    @Override
    @NotNull(message = "pulls is never NULL")
    public Pulls pulls() {
        try {
            return new MkPulls(this.storage, this.self, this.coords);
        } catch (final IOException ex) {
            throw new IllegalStateException(ex);
        }
    }

    @Override
    @NotNull(message = "hooks is never NULL")
    public Hooks hooks() {
        try {
            return new MkHooks(this.storage, this.self, this.coords);
        } catch (final IOException ex) {
            throw new IllegalStateException(ex);
        }
    }

    @Override
    @NotNull(message = "issue events is never NULL")
    public IssueEvents issueEvents() {
        try {
            return new MkIssueEvents(this.storage, this.self, this.coords);
        } catch (final IOException ex) {
            throw new IllegalStateException(ex);
        }
    }

    @Override
    @NotNull(message = "labels is never NULL")
    public Labels labels() {
        try {
            return new MkLabels(this.storage, this.self, this.coords);
        } catch (final IOException ex) {
            throw new IllegalStateException(ex);
        }
    }

    @Override
    @NotNull(message = "Assignees is never NULL")
    public Assignees assignees() {
        try {
            return new MkAssignees(this.storage, this.self, this.coords);
        } catch (final IOException ex) {
            throw new IllegalStateException(ex);
        }
    }

    @Override
    @NotNull(message = "releases is never NULL")
    public Releases releases() {
        try {
            return new MkReleases(this.storage, this.self, this.coords);
        } catch (final IOException ex) {
            throw new IllegalStateException(ex);
        }
    }

    @Override
    @NotNull(message = "forks is never NULL")
    public Forks forks() {
        try {
            return new MkForks(this.storage, this.self, this.coords);
        } catch (final IOException ex) {
            throw new IllegalStateException(ex);
        }
    }

    @Override
    @NotNull(message = "collaborators is never NULL")
    public Collaborators collaborators() {
        try {
            return new MkCollaborators(this.storage, this.self, this.coords);
        } catch (final IOException ex) {
            throw new IllegalStateException(ex);
        }
    }

    @Override
    @NotNull(message = "keys is never NULL")
    public DeployKeys keys() {
        try {
            return new MkDeployKeys(this.storage, this.self, this.coords);
        } catch (final IOException ex) {
            throw new IllegalStateException(ex);
        }
    }

    @Override
    @NotNull(message = "contents is never NULL")
    public Contents contents() {
        try {
            return new MkContents(this.storage, this.self, this.coords);
        } catch (final IOException ex) {
            throw new IllegalStateException(ex);
        }
    }

    @Override
    public void patch(
        @NotNull(message = "JSON is never NULL") final JsonObject json
    ) throws IOException {
        // Applies the JSON patch to this repo's node in the storage XML tree.
        new JsonPatch(this.storage).patch(this.xpath(), json);
    }

    @Override
    @NotNull(message = "commits is never NULL")
    public RepoCommits commits() {
        try {
            // this.coordinates() is equivalent to this.coords (see above).
            return new MkRepoCommits(
                this.storage,
                this.self,
                this.coordinates()
            );
        } catch (final IOException ex) {
            throw new IllegalStateException(ex);
        }
    }

    @Override
    @NotNull(message = "branches is never NULL")
    public Branches branches() {
        try {
            return new MkBranches(
                this.storage,
                this.self,
                this.coordinates()
            );
        } catch (final IOException ex) {
            throw new IllegalStateException(ex);
        }
    }

    @Override
    @NotNull(message = "Git is never NULL")
    public Git git() {
        try {
            return new MkGit(this.storage, this.self, this.coords);
        } catch (final IOException ex) {
            throw new IllegalStateException(ex);
        }
    }

    @Override
    @NotNull(message = "Stars is never NULL")
    public Stars stars() {
        try {
            return new MkStars(this.storage, this.self, this.coords);
        } catch (final IOException ex) {
            throw new IllegalStateException(ex);
        }
    }

    @Override
    @NotNull(message = "Notifications is never NULL")
    public Notifications notifications() {
        // NOTE(review): unlike the other accessors this does not consult
        // storage; presumably a fixed-size stub — confirm MkNotifications(int)
        // semantics before relying on it.
        return new MkNotifications(0);
    }

    @Override
    public Iterable<Language> languages() {
        // Hard-coded language statistics for the mock.
        final List<Language> languages = new ArrayList<Language>(0);
        final int java = 999;
        languages.add(new RtLanguage("Java", java));
        final int php = 888;
        languages.add(new RtLanguage("PHP", php));
        final int ruby = 777;
        languages.add(new RtLanguage("Ruby", ruby));
        return languages;
    }

    @Override
    @NotNull(message = "JSON is never NULL")
    public JsonObject json() throws IOException {
        return new JsonNode(
            this.storage.xml().nodes(this.xpath()).get(0)
        ).json();
    }

    @Override
    public int compareTo(final Repo repo) {
        return this.coords.compareTo(repo.coordinates());
    }

    /**
     * XPath of this element in XML tree.
     * @return XPath
     */
    @NotNull(message = "Xpath is never NULL")
    private String xpath() {
        return String.format(
            "/github/repos/repo[@coords='%s']",
            this.coords
        );
    }
}
/* * Copyright (c) 2014 Haixing Hu * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.github.haixing_hu.collection.primitive; import java.util.ConcurrentModificationException; import java.util.NoSuchElementException; import static com.github.haixing_hu.lang.Argument.*; /** * Abstract base class for {@link IntList}s backed by random access structures * like arrays. * <p /> * Read-only subclasses must override {@link #get} and {@link #size}. Mutable * subclasses should also override {@link #set}. Variably-sized subclasses * should also override {@link #add(int)} and {@link #removeElementAt}. All * other methods have at least some base implementation derived from these. * Subclasses may choose to override these methods to provide a more efficient * implementation. 
* * @author Haixing Hu */ public abstract class RandomAccessIntList extends AbstractIntCollection implements IntList { protected int modCount = 0; @Override public abstract int get(int index); @Override public abstract int size(); @Override public abstract int removeElementAt(final int index); @Override public abstract int set(final int index, final int element); @Override public abstract void add(final int index, final int element); @Override public boolean add(final int element) { add(size(), element); return true; } @Override public boolean addAll(int index, final IntCollection collection) { boolean modified = false; final IntIterator iter = collection.iterator(); while (iter.hasNext()) { add(index++, iter.next()); modified = true; } return modified; } @Override public int indexOf(final int element) { final IntIterator iter = iterator(); int i = 0; while (iter.hasNext()) { if (iter.next() == element) { return i; } else { ++i; } } return - 1; } @Override public int lastIndexOf(final int element) { final IntListIterator iter = listIterator(size()); while (iter.hasPrevious()) { if (iter.previous() == element) { return iter.nextIndex(); } } return - 1; } @Override public IntIterator iterator() { return listIterator(); } @Override public IntListIterator listIterator() { return listIterator(0); } @Override public IntListIterator listIterator(final int index) { return new RandomAccessIntListIterator(this, index); } @Override public IntList subList(final int fromIndex, final int toIndex) { return new RandomAccessIntSubList(this, fromIndex, toIndex); } @Override public boolean equals(final Object that) { if (this == that) { return true; } else if (that instanceof IntList) { final IntList thatList = (IntList) that; if (size() != thatList.size()) { return false; } final IntIterator thisIter = iterator(); final IntIterator thatIter = thatList.iterator(); while (thisIter.hasNext()) { if (thisIter.next() != thatIter.next()) { return false; } } return true; } else { return 
false; } } @Override public int hashCode() { final IntIterator iter = iterator(); int hash = 1; while (iter.hasNext()) { hash = (31 * hash) + iter.next(); } return hash; } @Override public String toString() { if (size() == 0) { return "[]"; } else { final StringBuilder builder = new StringBuilder(); builder.append('['); final IntIterator iter = iterator(); while (iter.hasNext()) { builder.append(iter.next()).append(','); } // eat the last separator ',' builder.setLength(builder.length() - 1); builder.append(']'); return builder.toString(); } } protected static class RandomAccessIntListIterator implements IntListIterator { protected RandomAccessIntList source; protected int nextIndex; protected int lastReturnedIndex; protected int expectedModCount; RandomAccessIntListIterator(final RandomAccessIntList list, final int index) { requireIndexInCloseRange(index, 0, list.size()); source = list; nextIndex = index; lastReturnedIndex = - 1; expectedModCount = source.modCount; } @Override public boolean hasNext() { if (expectedModCount != source.modCount) { throw new ConcurrentModificationException(); } return nextIndex < source.size(); } @Override public boolean hasPrevious() { if (expectedModCount != source.modCount) { throw new ConcurrentModificationException(); } return nextIndex > 0; } @Override public int nextIndex() { if (expectedModCount != source.modCount) { throw new ConcurrentModificationException(); } return nextIndex; } @Override public int previousIndex() { if (expectedModCount != source.modCount) { throw new ConcurrentModificationException(); } return nextIndex - 1; } @Override public int next() { if (expectedModCount != source.modCount) { throw new ConcurrentModificationException(); } if (! 
hasNext()) { throw new NoSuchElementException(); } else { final int val = source.get(nextIndex); lastReturnedIndex = nextIndex; ++nextIndex; return val; } } @Override public int previous() { if (expectedModCount != source.modCount) { throw new ConcurrentModificationException(); } if (! hasPrevious()) { throw new NoSuchElementException(); } else { final int val = source.get(nextIndex - 1); lastReturnedIndex = nextIndex - 1; --nextIndex; return val; } } @Override public void add(final int value) { if (expectedModCount != source.modCount) { throw new ConcurrentModificationException(); } source.add(nextIndex, value); ++nextIndex; lastReturnedIndex = - 1; expectedModCount = source.modCount; } @Override public void remove() { if (expectedModCount != source.modCount) { throw new ConcurrentModificationException(); } if (lastReturnedIndex == - 1) { throw new IllegalStateException(); } if (lastReturnedIndex == nextIndex) { // remove() following previous() source.removeElementAt(lastReturnedIndex); } else { // remove() following next() source.removeElementAt(lastReturnedIndex); --nextIndex; } lastReturnedIndex = - 1; expectedModCount = source.modCount; } @Override public void set(final int value) { if (expectedModCount != source.modCount) { throw new ConcurrentModificationException(); } if (lastReturnedIndex == - 1) { throw new IllegalStateException(); } source.set(lastReturnedIndex, value); expectedModCount = source.modCount; } } protected static class RandomAccessIntSubList extends RandomAccessIntList implements IntList { protected final int offset; protected int limit; protected final RandomAccessIntList source; protected int expectedModCount; RandomAccessIntSubList(final RandomAccessIntList list, final int fromIndex, final int toIndex) { requireLessEqual("fromIndex", fromIndex, "toIndex", toIndex); requireInCloseRange("fromIndex", fromIndex, 0, list.size()); source = list; offset = fromIndex; limit = toIndex - fromIndex; expectedModCount = list.modCount; } @Override 
public int get(final int index) { requireInRightOpenRange("index", index, 0, limit); if (expectedModCount != source.modCount) { throw new ConcurrentModificationException(); } return source.get(index + offset); } @Override public int removeElementAt(final int index) { requireInRightOpenRange("index", index, 0, limit); if (expectedModCount != source.modCount) { throw new ConcurrentModificationException(); } final int val = source.removeElementAt(index + offset); --limit; expectedModCount = source.modCount; ++modCount; return val; } @Override public int set(final int index, final int element) { requireInRightOpenRange("index", index, 0, limit); if (expectedModCount != source.modCount) { throw new ConcurrentModificationException(); } final int val = source.set(index + offset, element); ++modCount; expectedModCount = source.modCount; return val; } @Override public void add(final int index, final int element) { requireInCloseRange("index", index, 0, limit); if (expectedModCount != source.modCount) { throw new ConcurrentModificationException(); } source.add(index + offset, element); ++limit; ++modCount; expectedModCount = source.modCount; } @Override public int size() { if (expectedModCount != source.modCount) { throw new ConcurrentModificationException(); } return limit; } } }
package com.simpligility.maven.plugins.android.phase05compile;

import com.simpligility.maven.plugins.android.common.AndroidExtension;
import com.simpligility.maven.plugins.android.common.ArtifactResolverHelper;
import com.simpligility.maven.plugins.android.common.Const;
import com.simpligility.maven.plugins.android.common.JarHelper;
import com.simpligility.maven.plugins.android.common.NativeHelper;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.filefilter.TrueFileFilter;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.DefaultArtifact;
import org.apache.maven.artifact.handler.ArtifactHandler;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.logging.Log;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;

/**
 * Various helper methods for dealing with Android Native makefiles.
 *
 * @author Johan Lindquist
 */
public class MakefileHelper
{
    /**
     * Name of the environment variable holding the path of the file into which the generated makefile
     * echoes the effective LOCAL_* variable values ({@code LOCAL_C_INCLUDES}, {@code LOCAL_PATH}, ...)
     * so this plugin can read them back after the ndk-build run.
     */
    public static final String MAKEFILE_CAPTURE_FILE = "ANDROID_MAVEN_PLUGIN_LOCAL_C_INCLUDES_FILE";

    /**
     * Holder for the result of creating a makefile. This in particular keeps track of all directories created
     * for extracted header files, so they can be deleted after the build (see {@link #cleanupAfterBuild}).
     */
    public static class MakefileHolder
    {
        /** The full text of the generated makefile. */
        String makeFile;
        /** Temp directories into which header archives (HARs) were extracted. */
        List<File> includeDirectories;

        public MakefileHolder( List<File> includeDirectories, String makeFile )
        {
            this.includeDirectories = includeDirectories;
            this.makeFile = makeFile;
        }

        public List<File> getIncludeDirectories()
        {
            return includeDirectories;
        }

        public String getMakeFile()
        {
            return makeFile;
        }
    }

    /** Log to which to write log output. */
    private final Log log;
    /** Helper used to resolve header-archive (HAR) artifacts to local files. */
    private final ArtifactResolverHelper artifactResolverHelper;
    /** ArtifactHandler for har files. */
    private final ArtifactHandler harArtifactHandler;
    /** Folder in which apklibs are unpacked. */
    private final File unpackedApkLibsDirectory;

    /**
     * Initialize the MakefileHelper by storing the supplied parameters to local variables.
     * @param log                       Log to which to write log output.
     * @param artifactResolverHelper    ArtifactResolverHelper to use to resolve the artifacts.
     * @param harHandler                ArtifactHandler for har files.
     * @param unpackedApkLibsDirectory  Folder in which apklibs are unpacked.
     */
    public MakefileHelper( Log log, ArtifactResolverHelper artifactResolverHelper,
                           ArtifactHandler harHandler, File unpackedApkLibsDirectory )
    {
        this.log = log;
        this.artifactResolverHelper = artifactResolverHelper;
        this.harArtifactHandler = harHandler;
        this.unpackedApkLibsDirectory = unpackedApkLibsDirectory;
    }

    /**
     * Cleans up all include directories created in the temp directory during the build.
     *
     * @param makefileHolder The holder produced by the
     * {@link MakefileHelper#createMakefileFromArtifacts(Set, String, String, boolean)}
     */
    public static void cleanupAfterBuild( MakefileHolder makefileHolder )
    {
        if ( makefileHolder.getIncludeDirectories() != null )
        {
            for ( File file : makefileHolder.getIncludeDirectories() )
            {
                try
                {
                    FileUtils.deleteDirectory( file );
                }
                catch ( IOException e )
                {
                    // NOTE(review): failure to delete a temp dir is deliberately non-fatal (cleanup is
                    // best-effort), but printStackTrace bypasses the Maven Log; consider log.warn instead.
                    // This method is static and has no access to the instance log field.
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Creates an Android Makefile based on the specified set of static library dependency artifacts.
     *
     * @param artifacts The list of (static library) dependency artifacts to create the Makefile from
     * @param ndkArchitecture The architecture (ABI) currently being built, e.g. {@code armeabi}
     * @param defaultNDKArchitecture Architecture to assume for artifacts that do not encode one
     * @param useHeaderArchives If true, the Makefile should include a LOCAL_EXPORT_C_INCLUDES statement, pointing to
     *                          the location where the header archive was expanded
     * @return The created Makefile
     * @throws IOException if a header archive cannot be opened or a library file cannot be located
     * @throws MojoExecutionException if a header archive artifact cannot be resolved
     */
    public MakefileHolder createMakefileFromArtifacts( Set<Artifact> artifacts,
                                                       String ndkArchitecture, String defaultNDKArchitecture,
                                                       boolean useHeaderArchives )
        throws IOException, MojoExecutionException
    {
        final StringBuilder makeFile = new StringBuilder( "# Generated by Android Maven Plugin\n" );
        final List<File> includeDirectories = new ArrayList<File>();

        // Echo the effective LOCAL_* values into the capture file - allows us to somewhat intelligently
        // determine the include paths to use for the header archive after ndk-build has run.
        makeFile.append( "$(shell echo \"LOCAL_C_INCLUDES=$(LOCAL_C_INCLUDES)\" > $(" + MAKEFILE_CAPTURE_FILE + "))" );
        makeFile.append( '\n' );
        makeFile.append( "$(shell echo \"LOCAL_PATH=$(LOCAL_PATH)\" >> $(" + MAKEFILE_CAPTURE_FILE + "))" );
        makeFile.append( '\n' );
        makeFile.append( "$(shell echo \"LOCAL_MODULE_FILENAME=$(LOCAL_MODULE_FILENAME)\" >> $("
                + MAKEFILE_CAPTURE_FILE + "))" );
        makeFile.append( '\n' );
        makeFile.append( "$(shell echo \"LOCAL_MODULE=$(LOCAL_MODULE)\" >> $(" + MAKEFILE_CAPTURE_FILE + "))" );
        makeFile.append( '\n' );
        makeFile.append( "$(shell echo \"LOCAL_CFLAGS=$(LOCAL_CFLAGS)\" >> $(" + MAKEFILE_CAPTURE_FILE + "))" );
        makeFile.append( '\n' );

        if ( ! artifacts.isEmpty() )
        {
            for ( Artifact artifact : artifacts )
            {
                // Architecture for this artifact: taken from the artifact itself when encoded, otherwise
                // the supplied default.
                final String architecture = NativeHelper.extractArchitectureFromArtifact( artifact,
                        defaultNDKArchitecture );

                // Each artifact's section is guarded so it only applies when building its own ABI.
                makeFile.append( '\n' );
                makeFile.append( "ifeq ($(TARGET_ARCH_ABI)," ).append( architecture ).append( ")\n" );
                makeFile.append( "#\n" );
                makeFile.append( "# Group ID: " );
                makeFile.append( artifact.getGroupId() );
                makeFile.append( '\n' );
                makeFile.append( "# Artifact ID: " );
                makeFile.append( artifact.getArtifactId() );
                makeFile.append( '\n' );
                makeFile.append( "# Artifact Type: " );
                makeFile.append( artifact.getType() );
                makeFile.append( '\n' );
                makeFile.append( "# Version: " );
                makeFile.append( artifact.getVersion() );
                makeFile.append( '\n' );
                makeFile.append( "include $(CLEAR_VARS)" );
                makeFile.append( '\n' );
                makeFile.append( "LOCAL_MODULE := " );
                makeFile.append( artifact.getArtifactId() );
                makeFile.append( '\n' );

                // Emits LOCAL_PATH / LOCAL_SRC_FILES / LOCAL_MODULE_FILENAME; returns whether an APKLIB's
                // library turned out to be static.
                final boolean apklibStatic = addLibraryDetails( makeFile, artifact, ndkArchitecture );

                if ( useHeaderArchives )
                {
                    try
                    {
                        // Fix for dealing with APKLIBs - unfortunately it does not fully work since
                        // an APKLIB can contain any number of architectures making it somewhat difficult to
                        // resolve the related (HAR) artifact.
                        //
                        // In this case, we construct the classifier from <architecture> and the artifact classifier
                        // if it is also present.  Only issue is that if the APKLIB contains more than the armeabi
                        // libraries (e.g. x86 for examples) the HAR is not resolved correctly.
                        //
                        // NOTE(review): the type is compared against the literal "apklib" here, while
                        // addLibraryDetails uses AndroidExtension.APKLIB - presumably the same value; verify.
                        String classifier = artifact.getClassifier();
                        if ( "apklib".equals( artifact.getType() ) )
                        {
                            classifier = ndkArchitecture;
                            if ( artifact.getClassifier() != null )
                            {
                                classifier += "-" + artifact.getClassifier();
                            }
                        }

                        Artifact harArtifact = new DefaultArtifact( artifact.getGroupId(), artifact.getArtifactId(),
                                artifact.getVersion(), artifact.getScope(),
                                Const.ArtifactType.NATIVE_HEADER_ARCHIVE, classifier, harArtifactHandler );

                        File resolvedHarArtifactFile = artifactResolverHelper.resolveArtifactToFile( harArtifact );
                        log.debug( "Resolved har artifact file : " + resolvedHarArtifactFile );

                        // Extract the header archive into a unique temp dir; recorded in includeDirectories
                        // so cleanupAfterBuild can delete it.
                        final File includeDir = new File( System.getProperty( "java.io.tmpdir" ),
                                "android_maven_plugin_native_includes" + System.currentTimeMillis() + "_"
                                        + harArtifact.getArtifactId() );
                        includeDir.deleteOnExit();
                        includeDirectories.add( includeDir );

                        JarHelper.unjar( new JarFile( resolvedHarArtifactFile ), includeDir,
                                new JarHelper.UnjarListener()
                                {
                                    @Override
                                    public boolean include( JarEntry jarEntry )
                                    {
                                        // Skip jar metadata; everything else is a header to extract.
                                        return ! jarEntry.getName().startsWith( "META-INF" );
                                    }
                                } );

                        makeFile.append( "LOCAL_EXPORT_C_INCLUDES := " );
                        makeFile.append( includeDir.getAbsolutePath() );
                        makeFile.append( '\n' );

                        if ( log.isDebugEnabled() )
                        {
                            Collection<File> includes = FileUtils.listFiles( includeDir,
                                    TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE );
                            log.debug( "Listing LOCAL_EXPORT_C_INCLUDES for " + artifact.getId() + ": " + includes );
                        }
                    }
                    catch ( RuntimeException e )
                    {
                        throw new MojoExecutionException( "Error while resolving header archive file for: "
                                + artifact.getArtifactId(), e );
                    }
                }
                if ( Const.ArtifactType.NATIVE_IMPLEMENTATION_ARCHIVE.equals( artifact.getType() ) || apklibStatic )
                {
                    makeFile.append( "include $(PREBUILT_STATIC_LIBRARY)\n" );
                }
                else
                {
                    makeFile.append( "include $(PREBUILT_SHARED_LIBRARY)\n" );
                }
                makeFile.append( "endif #" ).append( artifact.getClassifier() ).append( '\n' );
                makeFile.append( '\n' );
            }
        }
        return new MakefileHolder( includeDirectories, makeFile.toString() );
    }

    /**
     * Appends the LOCAL_PATH / LOCAL_SRC_FILES / LOCAL_MODULE_FILENAME lines for one dependency artifact.
     * For an APKLIB, locates its single native library (static preferred over shared) under the unpack
     * directory; for anything else, points directly at the artifact's own file.
     *
     * @param makeFile buffer the makefile is being built into
     * @param artifact dependency to describe
     * @param ndkArchitecture architecture to fall back to when the artifact has no classifier
     * @return {@code true} iff the artifact is an APKLIB whose bundled library is static
     * @throws IOException if no suitable library file can be identified inside an APKLIB
     */
    private boolean addLibraryDetails( StringBuilder makeFile, Artifact artifact, String ndkArchitecture )
        throws IOException
    {
        boolean apklibStatic = false;
        if ( AndroidExtension.APKLIB.equals( artifact.getType() ) )
        {
            String classifier = artifact.getClassifier();
            String architecture = ( classifier != null ) ? classifier : ndkArchitecture;
            //
            // We assume that APKLIB contains a single static OR shared library
            // that we should link against.  The following code identifies that file.
            //
            File[] staticLibs = NativeHelper.listNativeFiles( artifact, unpackedApkLibsDirectory,
                    architecture, true );
            if ( staticLibs != null && staticLibs.length > 0 )
            {
                int libIdx = findApklibNativeLibrary( staticLibs, artifact.getArtifactId() );
                apklibStatic = true;
                addLibraryDetails( makeFile, staticLibs[libIdx], "" );
            }
            else
            {
                File[] sharedLibs = NativeHelper.listNativeFiles( artifact, unpackedApkLibsDirectory,
                        architecture, false );
                if ( sharedLibs == null )
                {
                    throw new IOException( "Failed to find any library file in APKLIB" );
                }
                int libIdx = findApklibNativeLibrary( sharedLibs, artifact.getArtifactId() );
                addLibraryDetails( makeFile, sharedLibs[libIdx], "" );
            }
        }
        else
        {
            addLibraryDetails( makeFile, artifact.getFile(), artifact.getArtifactId() );
        }
        return apklibStatic;
    }

    /**
     * Appends LOCAL_PATH, LOCAL_SRC_FILES and LOCAL_MODULE_FILENAME entries for a concrete library file.
     *
     * @param makeFile buffer the makefile is being built into
     * @param libFile the library file to reference
     * @param outputName module filename to emit; when empty, the library's own name (minus extension) is used
     * @throws IOException declared for symmetry with callers; not thrown by this body
     */
    private void addLibraryDetails( StringBuilder makeFile, File libFile, String outputName )
        throws IOException
    {
        makeFile.append( "LOCAL_PATH := " );
        makeFile.append( libFile.getParentFile().getAbsolutePath() );
        makeFile.append( '\n' );
        makeFile.append( "LOCAL_SRC_FILES := " );
        makeFile.append( libFile.getName() );
        makeFile.append( '\n' );
        makeFile.append( "LOCAL_MODULE_FILENAME := " );
        if ( "".equals( outputName ) )
        {
            makeFile.append( FilenameUtils.removeExtension( libFile.getName() ) );
        }
        else
        {
            makeFile.append( outputName );
        }
        makeFile.append( '\n' );
    }

    /**
     * Picks the library to use out of an APKLIB's candidates: a single candidate wins outright;
     * among several, exactly one whose name starts with {@code lib<artifactName>} must exist.
     *
     * @param libs the array of possible library files.  Must not be null.
     * @param artifactName artifact id used for the {@code lib<name>} prefix match
     * @return the index in the array of the library to use
     * @throws IOException if a library cannot be identified
     */
    private int findApklibNativeLibrary( File[] libs, String artifactName ) throws IOException
    {
        int libIdx = -1;

        if ( libs.length == 1 )
        {
            libIdx = 0;
        }
        else
        {
            log.info( "Found multiple library files, looking for name match with artifact" );
            // sb collects all candidate names purely for the error message below.
            StringBuilder sb = new StringBuilder();
            for ( int i = 0; i < libs.length; i++ )
            {
                if ( sb.length() != 0 )
                {
                    sb.append( ", " );
                }
                sb.append( libs[i].getName() );
                if ( libs[i].getName().startsWith( "lib" + artifactName ) )
                {
                    if ( libIdx != -1 )
                    {
                        // We have multiple matches, tell the user we can't handle this ...
                        throw new IOException( "Found multiple libraries matching artifact name " + artifactName
                                + ". Please use unique artifact/library names." );
                    }
                    libIdx = i;
                }
            }
            if ( libIdx < 0 )
            {
                throw new IOException( "Unable to determine main library from " + sb.toString()
                        + " APKLIB should contain only 1 library or a library matching the artifact name" );
            }
        }
        return libIdx;
    }

    /**
     * Creates a list of artifacts suitable for use in the LOCAL_STATIC_LIBRARIES or LOCAL_SHARED_LIBRARIES
     * variable in an Android makefile
     *
     * @param resolvedLibraryList dependency artifacts to consider
     * @param ndkArchitecture architecture used when probing APKLIB/AAR artifacts for native files
     * @param staticLibrary if true, collect static-library artifacts; otherwise shared-library artifacts
     * @return a space-separated list of Ids for artifacts that include static or shared libraries
     */
    public String createLibraryList( Set<Artifact> resolvedLibraryList, String ndkArchitecture,
                                     boolean staticLibrary )
    {
        // LinkedHashSet: de-duplicates while preserving dependency order.
        Set<String> libraryNames = new LinkedHashSet<String>();

        for ( Artifact a : resolvedLibraryList )
        {
            if ( staticLibrary && Const.ArtifactType.NATIVE_IMPLEMENTATION_ARCHIVE.equals( a.getType() ) )
            {
                libraryNames.add( a.getArtifactId() );
            }
            if ( ! staticLibrary && Const.ArtifactType.NATIVE_SYMBOL_OBJECT.equals( a.getType() ) )
            {
                libraryNames.add( a.getArtifactId() );
            }
            if ( AndroidExtension.APKLIB.equals( a.getType() ) || AndroidExtension.AAR.equals( a.getType() ) )
            {
                // APKLIB/AAR only qualify if they actually ship a native file of the requested linkage.
                File[] libFiles = NativeHelper.listNativeFiles( a, unpackedApkLibsDirectory,
                        ndkArchitecture, staticLibrary );
                if ( libFiles != null && libFiles.length > 0 )
                {
                    libraryNames.add( a.getArtifactId() );
                }
            }
        }

        StringBuilder sb = new StringBuilder();
        Iterator<String> iter = libraryNames.iterator();
        while ( iter.hasNext() )
        {
            sb.append( iter.next() );
            if ( iter.hasNext() )
            {
                sb.append( " " );
            }
        }
        return sb.toString();
    }
}
package yang.plugin.segmentation.anis;

import ij.IJ;
import ij.ImagePlus;
import ij.WindowManager;
import ij.gui.GenericDialog;
import ij.plugin.PlugIn;
import ij.process.ByteProcessor;
import ij.process.ImageProcessor;

/**
 * Anisotropic (orientation-following) Gaussian smoothing of linear features
 * (dendrites/axons) in 8-bit grayscale images.
 * <p>
 * Background annotations have moved to {@link DirectionDetectHessian}.
 * <p>
 * The image is convolved with Gaussians to obtain a Hessian matrix per pixel
 * (scale configured in {@link DirectionDetectHessian}); the eigenvector of the
 * eigenvalue with the larger absolute value gives an angle that is treated as
 * the local orientation of the linear feature.
 * <p>
 * Two debug modes are provided: {@code testMode} marks (instead of smoothing)
 * each pixel that would be smoothed; {@code isMainMode} additionally prints the
 * marked pixels' orientation and the smoothing kernel.
 * <p>
 * Smoothing proceeds in two steps: first, decide which pixels need smoothing
 * and find each one's orientation; second, generate an anisotropic Gaussian
 * kernel and smooth the pixel along that direction.
 * <p>2015/5/25
 * @author yajun yang
 */
public class AnistropicHessianSmooth implements PlugIn {

    /** Image currently being processed (taken from the active ImageJ window). */
    private ImagePlus ips;
    /** Gaussian half-axis along the feature orientation (a > b for anisotropy). */
    private static double a = 2.0;
    /** Gaussian half-axis across the feature orientation. */
    private static double b = 1.0;
    /** Kernel size; forced odd and >= 3 by showDialog/getOrientedGaussianKernel. */
    private static double h = 3;
    /** Sampling rate applied to x,y when evaluating the Gaussian. */
    private static double sample = 1;
    /** If true, show the "linear feature marked" image instead of smoothing. */
    private static boolean testMode = false;
    /** Only meaningful when testMode is true: also print orientation and kernel. */
    private static boolean isMainMode = false;

    /**
     * ImageJ plugin entry point: validates the active image (must be 8-bit
     * gray), collects parameters via a dialog, smooths a duplicate of the
     * processor and shows the result.
     */
    @Override
    public void run(String arg) {
        ips = WindowManager.getCurrentImage();
        if (null == ips) {
            IJ.noImage();
            return;
        }
        if (ips.getType() != ImagePlus.GRAY8) {
            IJ.showMessage("Only 8-bits gray image");
            return;
        }
        if (!showDialog(true))
            return;
        // Work on a duplicate so the source image stays untouched.
        ImageProcessor result = ips.getProcessor().duplicate();
        anisSmooth(result);
        ImagePlus ipsResult = new ImagePlus(ips.getTitle() + " Anistropic Smmoth "
                + DirectionDetectHessian.scale, result);
        ipsResult.setCalibration(ips.getCalibration());
        ipsResult.setDisplayRange(0, 255);
        ipsResult.show();
    }

    /**
     * Shows the parameter dialog and copies the user's choices into the static
     * fields; forces the kernel size h to be odd and at least 3.
     *
     * @param isPlugin currently unused. NOTE(review): parameter is never read.
     * @return false if the user cancelled, true otherwise.
     */
    public boolean showDialog(boolean isPlugin) {
        GenericDialog gd = new GenericDialog("AnistropicHessianSmooth");
        gd.addCheckbox("Whether Test Mode ", testMode);
        gd.addCheckbox("Whether Test Out ", isMainMode);
        gd.addNumericField("a :", a, 1);
        gd.addNumericField("b :", b, 1);
        gd.addNumericField("h :", h, 0);
        gd.addNumericField("Sample :", sample, 0);
        gd.showDialog();
        if (gd.wasCanceled())
            return false;
        testMode = gd.getNextBoolean();
        isMainMode = gd.getNextBoolean();
        a = gd.getNextNumber();
        b = gd.getNextNumber();
        h = gd.getNextNumber();
        sample = gd.getNextNumber();
        // Kernel size must be odd (so it has a center pixel) and >= 3.
        if ((int) h % 2 == 0)
            h -= 1;
        if (h < 3)
            h = 3;
        return true;
    }

    /**
     * Smooths the image in place: runs the Hessian orientation detector, then
     * convolves every pixel with a Gaussian kernel - anisotropic and oriented
     * by the detected angle where a feature was found (degree != -200, the
     * detector's "no feature" sentinel), isotropic (b,b) 3x3 otherwise.
     *
     * @param ip processor modified in place.
     */
    public void anisSmooth(ImageProcessor ip) {
        DirectionDetectHessian direct = new DirectionDetectHessian(ip);
        direct.run();
        // degree[y][x]: detected orientation in degrees; -200 marks "no linear
        // feature here" - NOTE(review): sentinel value inferred from usage,
        // confirm against DirectionDetectHessian.
        int[][] degree = direct.getDegree();
        if (degree == null) {
            IJ.showMessage("wrong degree output!");
            return;
        }
        int width = ip.getWidth();
        int height = ip.getHeight();
        for (int y = 0; y < height; y++)
            for (int x = 0; x < width; x++) {
                if (degree[y][x] != -200) {
                    System.out.println("x= " + x + " y= " + y + " " + degree[y][x]);
                    double[][] kernel = getOrientedGaussianKernel(a, b, degree[y][x], (int) h, sample);
                    convolve(ip, x, y, kernel);
                } else {
                    // NOTE(review): this kernel is fixed at 3x3 but convolve()
                    // iterates over (int) h entries - if the user picks h > 3
                    // this indexes past the kernel bounds. Looks like a bug;
                    // confirm before relying on h != 3.
                    double[][] kernel = getOrientedGaussianKernel(b, b, degree[y][x], 3, sample);
                    convolve(ip, x, y, kernel);
                }
            }
    }

    /**
     * Flips a square, odd-sized kernel about one axis. The origin is at the
     * center of the matrix.
     * If axis is 0, the kernel is mirrored vertically (about the x axis row).
     * If axis is 1, the kernel is mirrored horizontally (about the y axis column).
     * NOTE(review): not called anywhere in this file.
     *
     * @param kernel square kernel with odd dimensions.
     * @param axis 0 or 1; any other value yields an unmodified zero-filled copy.
     * @return the mirrored kernel (a new array; the input is untouched).
     * @throws IllegalArgumentException if the kernel is not square with odd size.
     */
    public double[][] reverseKernel(double[][] kernel, int axis) {
        int col = kernel[0].length;
        int row = kernel.length;
        if (col != row || row % 2 == 0)
            throw new IllegalArgumentException();
        double[][] k = new double[row][col];
        for (int i = 0; i < row; i++) {
            for (int j = 0; j < col; j++) {
                switch (axis) {
                case 0:
                    k[i][j] = kernel[row - 1 - i][j];
                    break;
                case 1:
                    k[i][j] = kernel[i][col - 1 - j];
                    break;
                }
            }
        }
        return k;
    }

    /**
     * Applies the kernel at a single pixel (x, y) and writes the result back.
     * In testMode the pixel is instead set to 255 to mark it as "would be
     * smoothed"; with isMainMode also true, the kernel is printed.
     * <p>
     * NOTE(review): the loop bound is (int) h, not kernel.length - callers
     * must pass a kernel of exactly (int)h x (int)h (see anisSmooth note).
     * Out-of-range getPixel coordinates return 0 in ImageJ, so borders are
     * effectively zero-padded.
     *
     * @param ip processor modified in place.
     * @param x column of the target pixel.
     * @param y row of the target pixel.
     * @param kernel normalized smoothing kernel.
     */
    public void convolve(ImageProcessor ip, int x, int y, double[][] kernel) {
        double pixelValue = 0;
        if (testMode) {
            // mark the smoothed point
            pixelValue = 255;
            ip.putPixel(x, y, (int) pixelValue);
            if (isMainMode) {
                for (int i = 0; i < (int) h; i++) {
                    for (int j = 0; j < (int) h; j++) {
                        System.out.print(kernel[i][j] + " ");
                    }
                    System.out.println();
                }
            }
            return;
        }
        for (int i = 0; i < (int) h; i++) {
            for (int j = 0; j < (int) h; j++) {
                pixelValue += kernel[i][j] * ip.getPixel(x + j - (int) h / 2, y + i - (int) h / 2);
            }
        }
        ip.putPixel(x, y, Math.round((float) pixelValue));
    }

    /**
     * This will produce a two dimensional anisotropic Gaussian kernel,
     * normalized to sum to 1.
     * Here, the coordinate system is different from the image coordinate system:
     * the origin is at the ellipse center with x pointing right and y pointing up.
     * <p>f(x, y) = exp(-(xcosr-ysinr)^2/a^2 - (xsinr+ycosr)^2/b^2)<p>
     *
     * @param a major half-axis, a > b for anisotropy.
     * @param b minor half-axis.
     * @param degree is the variable 'r' in the above equation, in degrees.
     * @param h the kernel size, usually 3, 5, 7... (forced odd and >= 3).
     * @param sample the sampling rate; x and y are multiplied by it before
     *               evaluating f(x,y). Non-positive values are reset to 1.
     * @return a 2D Gaussian kernel.
     */
    public double[][] getOrientedGaussianKernel(double a, double b, int degree, int h, double sample) {
        if (h < 3)
            h = 3;
        if (h % 2 == 0)
            h -= 1;
        if (a == 0 || b == 0) {
            // NOTE(review): IJ.error only shows a dialog - execution continues
            // and divides by a*a / b*b below, producing a NaN kernel. Looks
            // like this should return or throw instead; confirm intent.
            IJ.error("Can't make the value a and b of the gaussian kernel zero");
        }
        if (sample <= 0.0)
            sample = 1;
        double kernel[][] = new double[h][h];
        double sum = 0;
        // 'sigma' is the rotation angle in radians (not a standard deviation).
        double sigma = (double) degree / 180.0 * Math.PI;
        for (int y = -h / 2; y <= h / 2; y++)
            for (int x = -h / 2; x <= h / 2; x++) {
                double x2 = x * Math.cos(sigma) - y * Math.sin(sigma);
                x2 *= sample;
                x2 *= x2;
                double y2 = x * Math.sin(sigma) + y * Math.cos(sigma);
                y2 *= sample;
                y2 *= y2;
                x2 /= (a * a);
                y2 /= (b * b);
                // NOTE(review): first index here is the x offset, but
                // convolve() reads kernel[row][col] (first index = y). For an
                // asymmetric kernel this transposes/mirrors the orientation -
                // possibly compensated elsewhere; verify against
                // DirectionDetectHessian's angle convention.
                kernel[x + h / 2][y + h / 2] = Math.exp(-(x2 + y2));
                sum += kernel[x + h / 2][y + h / 2];
            }
        // Normalize so the kernel preserves overall brightness.
        for (int y = 0; y < h; y++) {
            for (int x = 0; x < h; x++) {
                kernel[x][y] /= sum;
            }
        }
        return kernel;
    }

    /**
     * Manual smoke test: smooths a small hand-written 12x8 image and prints the
     * pixel values before and after.
     */
    public static void main(String[] args) {
        isMainMode = true;
        byte[] image = {
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 100, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 100, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 100, 0, 0, 0, 0, 0, 100, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 80, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 100, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
        };
        ImageProcessor ip = new ByteProcessor(12, 8, image);
        for (int y = 0; y < ip.getHeight(); y++) {
            for (int x = 0; x < ip.getWidth(); x++) {
                System.out.print(ip.getPixel(x, y) + " ");
            }
            System.out.println();
        }
        AnistropicHessianSmooth ani = new AnistropicHessianSmooth();
        ani.anisSmooth(ip);
        for (int y = 0; y < ip.getHeight(); y++) {
            for (int x = 0; x < ip.getWidth(); x++) {
                System.out.print(ip.getPixel(x, y) + " ");
            }
            System.out.println();
        }
    }
}
/*
 * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package sun.security.jca;

import java.util.*;

import java.security.*;
import java.security.Provider.Service;

/**
 * Collection of utility methods to facilitate implementing getInstance()
 * methods in the JCA/JCE/JSSE/... framework.
 *
 * @author  Andreas Sterbenz
 * @since   1.5
 */
public class GetInstance {

    // Utility class: all members are static, never instantiated.
    private GetInstance() {
        // empty
    }

    /**
     * Static inner class representing a newly created instance.
     * Pairs the implementation object with the Provider that supplied it.
     */
    public static final class Instance {
        // public final fields, access directly without accessors
        public final Provider provider;
        public final Object impl;
        private Instance(Provider provider, Object impl) {
            this.provider = provider;
            this.impl = impl;
        }
        // Return Provider and implementation as an array as used in the
        // old Security.getImpl() methods.
        public Object[] toArray() {
            return new Object[] {impl, provider};
        }
    }

    /**
     * Returns the first available Service for (type, algorithm) from the
     * installed provider list, or throws if none provides it.
     *
     * @throws NoSuchAlgorithmException if no provider supports the algorithm
     */
    public static Service getService(String type, String algorithm)
            throws NoSuchAlgorithmException {
        ProviderList list = Providers.getProviderList();
        Service s = list.getService(type, algorithm);
        if (s == null) {
            throw new NoSuchAlgorithmException
                    (algorithm + " " + type + " not available");
        }
        return s;
    }

    /**
     * Returns the Service for (type, algorithm) from the named provider.
     *
     * @throws IllegalArgumentException  if provider is null or empty
     * @throws NoSuchProviderException   if the named provider is not installed
     * @throws NoSuchAlgorithmException  if that provider lacks the algorithm
     */
    public static Service getService(String type, String algorithm,
            String provider) throws NoSuchAlgorithmException,
            NoSuchProviderException {
        if ((provider == null) || (provider.length() == 0)) {
            throw new IllegalArgumentException("missing provider");
        }
        Provider p = Providers.getProviderList().getProvider(provider);
        if (p == null) {
            throw new NoSuchProviderException("no such provider: " +
                provider);
        }
        Service s = p.getService(type, algorithm);
        if (s == null) {
            throw new NoSuchAlgorithmException("no such algorithm: "
                + algorithm + " for provider " + provider);
        }
        return s;
    }

    /**
     * Returns the Service for (type, algorithm) from the given Provider
     * object (which need not be installed in the provider list).
     *
     * @throws IllegalArgumentException  if provider is null
     * @throws NoSuchAlgorithmException  if the provider lacks the algorithm
     */
    public static Service getService(String type, String algorithm,
            Provider provider) throws NoSuchAlgorithmException {
        if (provider == null) {
            throw new IllegalArgumentException("missing provider");
        }
        Service s = provider.getService(type, algorithm);
        if (s == null) {
            throw new NoSuchAlgorithmException("no such algorithm: "
                + algorithm + " for provider " + provider.getName());
        }
        return s;
    }

    /**
     * Return a List of all the available Services that implement
     * (type, algorithm). Note that the list is initialized lazily
     * and Provider loading and lookup is only triggered when
     * necessary.
     */
    public static List<Service> getServices(String type, String algorithm) {
        ProviderList list = Providers.getProviderList();
        return list.getServices(type, algorithm);
    }

    /**
     * This method exists for compatibility with JCE only. It will be removed
     * once JCE has been changed to use the replacement method.
     * @deprecated use getServices(List&lt;ServiceId&gt;) instead
     */
    @Deprecated
    public static List<Service> getServices(String type,
            List<String> algorithms) {
        ProviderList list = Providers.getProviderList();
        return list.getServices(type, algorithms);
    }

    /**
     * Return a List of all the available Services that implement any of
     * the specified algorithms. See getServices(String, String) for details.
     */
    public static List<Service> getServices(List<ServiceId> ids) {
        ProviderList list = Providers.getProviderList();
        return list.getServices(ids);
    }

    /*
     * For all the getInstance() methods below:
     * @param type the type of engine (e.g. MessageDigest)
     * @param clazz the Spi class that the implementation must subclass
     *   (e.g. MessageDigestSpi.class) or null if no superclass check
     *   is required
     * @param algorithm the name of the algorithm (or alias), e.g. MD5
     * @param provider the provider (String or Provider object)
     * @param param the parameter to pass to the Spi constructor
     *   (for CertStores)
     *
     * There are overloaded methods for all the permutations.
     */

    public static Instance getInstance(String type, Class<?> clazz,
            String algorithm) throws NoSuchAlgorithmException {
        // in the almost all cases, the first service will work
        // avoid taking long path if so
        ProviderList list = Providers.getProviderList();
        Service firstService = list.getService(type, algorithm);
        if (firstService == null) {
            throw new NoSuchAlgorithmException
                    (algorithm + " " + type + " not available");
        }
        NoSuchAlgorithmException failure;
        try {
            return getInstance(firstService, clazz);
        } catch (NoSuchAlgorithmException e) {
            failure = e;
        }
        // if we cannot get the service from the preferred provider,
        // fail over to the next
        for (Service s : list.getServices(type, algorithm)) {
            if (s == firstService) {
                // do not retry initial failed service
                continue;
            }
            try {
                return getInstance(s, clazz);
            } catch (NoSuchAlgorithmException e) {
                failure = e;
            }
        }
        // every candidate failed: rethrow the most recent failure
        throw failure;
    }

    public static Instance getInstance(String type, Class<?> clazz,
            String algorithm, Object param) throws NoSuchAlgorithmException {
        List<Service> services = getServices(type, algorithm);
        NoSuchAlgorithmException failure = null;
        // try each provider's service in preference order, remembering the
        // last failure so it can be reported if none succeed
        for (Service s : services) {
            try {
                return getInstance(s, clazz, param);
            } catch (NoSuchAlgorithmException e) {
                failure = e;
            }
        }
        if (failure != null) {
            throw failure;
        } else {
            throw new NoSuchAlgorithmException
                    (algorithm + " " + type + " not available");
        }
    }

    public static Instance getInstance(String type, Class<?> clazz,
            String algorithm, String provider) throws NoSuchAlgorithmException,
            NoSuchProviderException {
        return getInstance(getService(type, algorithm, provider), clazz);
    }

    public static Instance getInstance(String type, Class<?> clazz,
            String algorithm, Object param, String provider)
            throws NoSuchAlgorithmException, NoSuchProviderException {
        return getInstance(getService(type, algorithm, provider), clazz, param);
    }

    public static Instance getInstance(String type, Class<?> clazz,
            String algorithm, Provider provider)
            throws NoSuchAlgorithmException {
        return getInstance(getService(type, algorithm, provider), clazz);
    }

    public static Instance getInstance(String type, Class<?> clazz,
            String algorithm, Object param, Provider provider)
            throws NoSuchAlgorithmException {
        return getInstance(getService(type, algorithm, provider), clazz, param);
    }

    /*
     * The two getInstance() methods below take a service. They are
     * intended for classes that cannot use the standard methods, e.g.
     * because they implement delayed provider selection like the
     * Signature class.
     */

    public static Instance getInstance(Service s, Class<?> clazz)
            throws NoSuchAlgorithmException {
        Object instance = s.newInstance(null);
        checkSuperClass(s, instance.getClass(), clazz);
        return new Instance(s.getProvider(), instance);
    }

    public static Instance getInstance(Service s, Class<?> clazz,
            Object param) throws NoSuchAlgorithmException {
        Object instance = s.newInstance(param);
        checkSuperClass(s, instance.getClass(), clazz);
        return new Instance(s.getProvider(), instance);
    }

    /**
     * Check whether subClass is a subclass of superClass. If not,
     * throw a NoSuchAlgorithmException. A null superClass disables
     * the check entirely.
     */
    public static void checkSuperClass(Service s, Class<?> subClass,
            Class<?> superClass) throws NoSuchAlgorithmException {
        if (superClass == null) {
            return;
        }
        if (superClass.isAssignableFrom(subClass) == false) {
            throw new NoSuchAlgorithmException
                ("class configured for " + s.getType() + ": "
                + s.getClassName() + " not a " + s.getType());
        }
    }

}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInsight.hint;

import com.intellij.icons.AllIcons;
import com.intellij.ide.IdeTooltipManager;
import com.intellij.openapi.util.Ref;
import com.intellij.ui.*;
import com.intellij.util.Consumer;
import com.intellij.util.ui.Html;
import com.intellij.util.ui.UIUtil;
import org.intellij.lang.annotations.JdkConstants;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.border.CompoundBorder;
import javax.swing.event.HyperlinkListener;
import java.awt.*;
import java.awt.event.MouseListener;

/**
 * Factory methods for the hint components (information / question / error
 * tooltips) used by the IDE. Each color pair carries a light and a dark
 * (Darcula) variant via JBColor.
 */
public class HintUtil {
  public static final Color INFORMATION_COLOR = new JBColor(new Color(253, 254, 226), new Color(0x4d4f51));
  public static final Color QUESTION_COLOR = new JBColor(new Color(181, 208, 251), new Color(55, 108, 137));
  public static final Color ERROR_COLOR = new JBColor(new Color(255, 220, 220), new Color(0x781732));

  public static final Color QUESTION_UNDERSCORE_COLOR = JBColor.foreground();

  // Utility class: not instantiable.
  private HintUtil() {
  }

  public static JComponent createInformationLabel(@NotNull String text) {
    return createInformationLabel(text, null, null, null);
  }

  /**
   * Creates an information-style hint label.
   *
   * @param hyperlinkListener   notified when a link inside the hint is clicked; may be null
   * @param mouseListener       attached to the hint's text pane; may be null
   * @param updatedTextConsumer receives a setter that can replace the hint text later; may be null
   */
  public static JComponent createInformationLabel(@NotNull String text,
                                                  @Nullable HyperlinkListener hyperlinkListener,
                                                  @Nullable MouseListener mouseListener,
                                                  @Nullable Ref<Consumer<String>> updatedTextConsumer) {
    HintHint hintHint = getInformationHint();
    final HintLabel label = new HintLabel();
    label.setText(text, hintHint);
    label.setIcon(null);

    // Non-AWT tooltips paint their own background/border instead of the
    // platform tooltip chrome.
    if (!hintHint.isAwtTooltip()) {
      label.setBorder(createHintBorder());
      label.setForeground(JBColor.foreground());
      label.setFont(getBoldFont());
      label.setBackground(INFORMATION_COLOR);
      label.setOpaque(true);
    }
    if (hyperlinkListener != null) {
      label.myPane.addHyperlinkListener(hyperlinkListener);
    }
    if (mouseListener != null) {
      label.myPane.addMouseListener(mouseListener);
    }
    if (updatedTextConsumer != null) {
      updatedTextConsumer.set(s -> {
        label.myPane.setText(s);

        // Force preferred size recalculation.
        label.setPreferredSize(null);
        label.myPane.setPreferredSize(null);
      });
    }
    return label;
  }

  /** Returns the standard HintHint configuration for information tooltips. */
  @NotNull
  public static HintHint getInformationHint() {
    //noinspection UseJBColor
    return new HintHint().setTextBg(INFORMATION_COLOR)
      .setTextFg(UIUtil.isUnderDarcula() ? UIUtil.getLabelForeground() : Color.black)
      .setFont(getBoldFont())
      .setAwtTooltip(true);
  }

  /** Builds the classic hint border: white top/left, gray bottom/right, 2px padding. */
  public static CompoundBorder createHintBorder() {
    //noinspection UseJBColor
    return BorderFactory.createCompoundBorder(
      new ColoredSideBorder(Color.white, Color.white, Color.gray, Color.gray, 1),
      BorderFactory.createEmptyBorder(2, 2, 2, 2)
    );
  }

  @NotNull
  public static JComponent createInformationLabel(SimpleColoredText text) {
    return createInformationLabel(text, null);
  }

  /** Creates a question-style hint (blue background, help icon). */
  public static JComponent createQuestionLabel(String text) {
    HintHint hintHint = new HintHint().setTextBg(QUESTION_COLOR)
      .setTextFg(JBColor.foreground())
      .setFont(getBoldFont())
      .setAwtTooltip(true);

    HintLabel label = new HintLabel();
    label.setText(text, hintHint);
    label.setIcon(AllIcons.General.Help_small);

    if (!hintHint.isAwtTooltip()) {
      label.setBorder(createHintBorder());
      label.setForeground(JBColor.foreground());
      label.setFont(getBoldFont());
      label.setBackground(QUESTION_COLOR);
      label.setOpaque(true);
    }
    return label;
  }

  /** Creates an empty colored component pre-styled for information hints. */
  @NotNull
  public static SimpleColoredComponent createInformationComponent() {
    SimpleColoredComponent component = new SimpleColoredComponent();
    component.setBackground(INFORMATION_COLOR);
    component.setForeground(JBColor.foreground());
    component.setFont(getBoldFont());
    return component;
  }

  @NotNull
  public static JComponent createInformationLabel(@NotNull SimpleColoredText text, @Nullable Icon icon) {
    SimpleColoredComponent component = createInformationComponent();
    component.setIcon(icon);
    text.appendToComponent(component);
    return new HintLabel(component);
  }

  /** Creates an error-style hint (red background, no icon). */
  public static JComponent createErrorLabel(String text) {
    HintHint hintHint = new HintHint().setTextBg(ERROR_COLOR)
      .setTextFg(JBColor.foreground())
      .setFont(getBoldFont())
      .setAwtTooltip(true);

    HintLabel label = new HintLabel();
    label.setText(text, hintHint);
    label.setIcon(null);

    if (!hintHint.isAwtTooltip()) {
      label.setBorder(createHintBorder());
      label.setForeground(JBColor.foreground());
      label.setFont(getBoldFont());
      label.setBackground(ERROR_COLOR);
      label.setOpaque(true);
    }
    return label;
  }

  private static Font getBoldFont() {
    return UIUtil.getLabelFont().deriveFont(Font.BOLD);
  }

  /**
   * Creates the small "advertisement" label shown at the bottom of popups.
   * The font is rendered two points smaller than the default label font.
   */
  public static JLabel createAdComponent(final String bottomText, final Border border, @JdkConstants.HorizontalAlignment int alignment) {
    JLabel label = new JLabel();
    label.setText(bottomText);
    label.setHorizontalAlignment(alignment);
    label.setFont(label.getFont().deriveFont((float)(label.getFont().getSize() - 2)));
    if (bottomText != null) {
      label.setBorder(border);
    }
    return label;
  }

  @NotNull
  public static String prepareHintText(@NotNull String text, @NotNull HintHint hintHint) {
    return prepareHintText(new Html(text), hintHint);
  }

  /**
   * Wraps the HTML body in a full html/head/body document with a CSS font
   * declaration derived from the hint's font and colors.
   */
  public static String prepareHintText(@NotNull Html text, @NotNull HintHint hintHint) {
    String htmlBody = UIUtil.getHtmlBody(text);
    return String.format(
      "<html><head>%s</head><body>%s</body></html>",
      UIUtil.getCssFontDeclaration(hintHint.getTextFont(), hintHint.getTextForeground(), hintHint.getLinkForeground(),
                                   hintHint.getUlImg()),
      htmlBody
    );
  }

  /**
   * Panel hosting the hint content: exactly one of an HTML pane or a
   * SimpleColoredComponent in the centre, plus an optional icon on the west.
   */
  private static class HintLabel extends JPanel {
    private JEditorPane myPane;
    private SimpleColoredComponent myColored;
    private JLabel myIcon;

    private HintLabel() {
      setLayout(new BorderLayout());
    }

    private HintLabel(@NotNull SimpleColoredComponent component) {
      this();
      setText(component);
    }

    @Override
    public boolean requestFocusInWindow() {
      // Forward the focus to the tooltip contents so that screen readers announce
      // the tooltip contents right away.
      if (myPane != null) {
        return myPane.requestFocusInWindow();
      }
      else if (myColored != null) {
        return myColored.requestFocusInWindow();
      }
      else if (myIcon != null) {
        return myIcon.requestFocusInWindow();
      }
      return super.requestFocusInWindow();
    }

    public void setText(@NotNull SimpleColoredComponent colored) {
      clearText();

      myColored = colored;
      add(myColored, BorderLayout.CENTER);

      setOpaque(true);
      setBackground(colored.getBackground());

      revalidate();
      repaint();
    }

    public void setText(String s, HintHint hintHint) {
      clearText();

      if (s != null) {
        myPane = IdeTooltipManager.initPane(s, hintHint, null);
        add(myPane, BorderLayout.CENTER);
      }

      setOpaque(true);
      setBackground(hintHint.getTextBackground());

      revalidate();
      repaint();
    }

    // Removes whichever content component is currently installed.
    private void clearText() {
      if (myPane != null) {
        remove(myPane);
        myPane = null;
      }

      if (myColored != null) {
        remove(myColored);
        myColored = null;
      }
    }

    public void setIcon(Icon icon) {
      if (myIcon != null) {
        remove(myIcon);
      }

      // NOTE: a null icon still installs an (empty) JLabel on the west side.
      myIcon = new JLabel(icon, SwingConstants.CENTER);
      myIcon.setVerticalAlignment(SwingConstants.TOP);

      add(myIcon, BorderLayout.WEST);

      revalidate();
      repaint();
    }

    @Override
    public String toString() {
      return "Hint: text='" + (myPane != null ? myPane.getText() : "") + "'";
    }
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.lucene.store;

import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.lessThan;

/**
 * Tests for {@link InputStreamIndexInput}: wrapping a Lucene IndexInput as
 * an InputStream with a per-stream read limit.
 *
 * All tests share the same 6-byte fixture: three bytes of value 1 followed
 * by three bytes of value 2 (see {@link #createTestInput()}).
 */
public class InputStreamIndexInputTests extends ESTestCase {

    /**
     * Builds the shared fixture: writes 1,1,1,2,2,2 into an in-memory
     * directory and returns an IndexInput positioned at the start.
     */
    private IndexInput createTestInput() throws IOException {
        RAMDirectory dir = new RAMDirectory();
        IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
        for (int i = 0; i < 3; i++) {
            output.writeByte((byte) 1);
        }
        for (int i = 0; i < 3; i++) {
            output.writeByte((byte) 2);
        }
        output.close();
        return dir.openInput("test", IOContext.DEFAULT);
    }

    public void testSingleReadSingleByteLimit() throws IOException {
        IndexInput input = createTestInput();

        // With a limit of 1, each stream yields exactly one byte then EOF.
        for (int i = 0; i < 3; i++) {
            InputStreamIndexInput is = new InputStreamIndexInput(input, 1);
            assertThat(input.getFilePointer(), lessThan(input.length()));
            assertThat(is.actualSizeToRead(), equalTo(1L));
            assertThat(is.read(), equalTo(1));
            assertThat(is.read(), equalTo(-1));
        }

        for (int i = 0; i < 3; i++) {
            InputStreamIndexInput is = new InputStreamIndexInput(input, 1);
            assertThat(input.getFilePointer(), lessThan(input.length()));
            assertThat(is.actualSizeToRead(), equalTo(1L));
            assertThat(is.read(), equalTo(2));
            assertThat(is.read(), equalTo(-1));
        }

        // Input exhausted: a fresh limited stream has nothing left to read.
        assertThat(input.getFilePointer(), equalTo(input.length()));
        InputStreamIndexInput is = new InputStreamIndexInput(input, 1);
        assertThat(is.actualSizeToRead(), equalTo(0L));
        assertThat(is.read(), equalTo(-1));
    }

    public void testReadMultiSingleByteLimit1() throws IOException {
        IndexInput input = createTestInput();

        // A bulk read into a 2-byte buffer is still capped at the 1-byte limit.
        byte[] read = new byte[2];

        for (int i = 0; i < 3; i++) {
            assertThat(input.getFilePointer(), lessThan(input.length()));
            InputStreamIndexInput is = new InputStreamIndexInput(input, 1);
            assertThat(is.actualSizeToRead(), equalTo(1L));
            assertThat(is.read(read), equalTo(1));
            assertThat(read[0], equalTo((byte) 1));
        }

        for (int i = 0; i < 3; i++) {
            assertThat(input.getFilePointer(), lessThan(input.length()));
            InputStreamIndexInput is = new InputStreamIndexInput(input, 1);
            assertThat(is.actualSizeToRead(), equalTo(1L));
            assertThat(is.read(read), equalTo(1));
            assertThat(read[0], equalTo((byte) 2));
        }

        assertThat(input.getFilePointer(), equalTo(input.length()));
        InputStreamIndexInput is = new InputStreamIndexInput(input, 1);
        assertThat(is.actualSizeToRead(), equalTo(0L));
        assertThat(is.read(read), equalTo(-1));
    }

    public void testSingleReadTwoBytesLimit() throws IOException {
        IndexInput input = createTestInput();

        // With a limit of 2 the streams slice the fixture as 11 | 12 | 22.
        assertThat(input.getFilePointer(), lessThan(input.length()));
        InputStreamIndexInput is = new InputStreamIndexInput(input, 2);
        assertThat(is.actualSizeToRead(), equalTo(2L));
        assertThat(is.read(), equalTo(1));
        assertThat(is.read(), equalTo(1));
        assertThat(is.read(), equalTo(-1));

        assertThat(input.getFilePointer(), lessThan(input.length()));
        is = new InputStreamIndexInput(input, 2);
        assertThat(is.actualSizeToRead(), equalTo(2L));
        assertThat(is.read(), equalTo(1));
        assertThat(is.read(), equalTo(2));
        assertThat(is.read(), equalTo(-1));

        assertThat(input.getFilePointer(), lessThan(input.length()));
        is = new InputStreamIndexInput(input, 2);
        assertThat(is.actualSizeToRead(), equalTo(2L));
        assertThat(is.read(), equalTo(2));
        assertThat(is.read(), equalTo(2));
        assertThat(is.read(), equalTo(-1));

        assertThat(input.getFilePointer(), equalTo(input.length()));
        is = new InputStreamIndexInput(input, 2);
        assertThat(is.actualSizeToRead(), equalTo(0L));
        assertThat(is.read(), equalTo(-1));
    }

    public void testReadMultiTwoBytesLimit1() throws IOException {
        IndexInput input = createTestInput();

        byte[] read = new byte[2];

        assertThat(input.getFilePointer(), lessThan(input.length()));
        InputStreamIndexInput is = new InputStreamIndexInput(input, 2);
        assertThat(is.actualSizeToRead(), equalTo(2L));
        assertThat(is.read(read), equalTo(2));
        assertThat(read[0], equalTo((byte) 1));
        assertThat(read[1], equalTo((byte) 1));

        assertThat(input.getFilePointer(), lessThan(input.length()));
        is = new InputStreamIndexInput(input, 2);
        assertThat(is.actualSizeToRead(), equalTo(2L));
        assertThat(is.read(read), equalTo(2));
        assertThat(read[0], equalTo((byte) 1));
        assertThat(read[1], equalTo((byte) 2));

        assertThat(input.getFilePointer(), lessThan(input.length()));
        is = new InputStreamIndexInput(input, 2);
        assertThat(is.actualSizeToRead(), equalTo(2L));
        assertThat(is.read(read), equalTo(2));
        assertThat(read[0], equalTo((byte) 2));
        assertThat(read[1], equalTo((byte) 2));

        assertThat(input.getFilePointer(), equalTo(input.length()));
        is = new InputStreamIndexInput(input, 2);
        assertThat(is.actualSizeToRead(), equalTo(0L));
        assertThat(is.read(read), equalTo(-1));
    }

    public void testReadMultiFourBytesLimit() throws IOException {
        IndexInput input = createTestInput();

        byte[] read = new byte[4];

        // First stream reads 4 bytes, the second only the remaining 2.
        assertThat(input.getFilePointer(), lessThan(input.length()));
        InputStreamIndexInput is = new InputStreamIndexInput(input, 4);
        assertThat(is.actualSizeToRead(), equalTo(4L));
        assertThat(is.read(read), equalTo(4));
        assertThat(read[0], equalTo((byte) 1));
        assertThat(read[1], equalTo((byte) 1));
        assertThat(read[2], equalTo((byte) 1));
        assertThat(read[3], equalTo((byte) 2));

        assertThat(input.getFilePointer(), lessThan(input.length()));
        is = new InputStreamIndexInput(input, 4);
        assertThat(is.actualSizeToRead(), equalTo(2L));
        assertThat(is.read(read), equalTo(2));
        assertThat(read[0], equalTo((byte) 2));
        assertThat(read[1], equalTo((byte) 2));

        assertThat(input.getFilePointer(), equalTo(input.length()));
        is = new InputStreamIndexInput(input, 4);
        assertThat(is.actualSizeToRead(), equalTo(0L));
        assertThat(is.read(read), equalTo(-1));
    }

    public void testMarkRest() throws Exception {
        IndexInput input = createTestInput();

        // mark()/reset() must replay the bytes read after the mark.
        InputStreamIndexInput is = new InputStreamIndexInput(input, 4);
        assertThat(is.markSupported(), equalTo(true));
        assertThat(is.read(), equalTo(1));
        assertThat(is.read(), equalTo(1));
        is.mark(0);
        assertThat(is.read(), equalTo(1));
        assertThat(is.read(), equalTo(2));
        is.reset();
        assertThat(is.read(), equalTo(1));
        assertThat(is.read(), equalTo(2));
    }
}
/*
 * Copyright (c) 2012, Paul Merlin. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.qibud.eventstore.test;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.codeartisans.java.toolbox.io.IO;
import org.junit.Test;
import org.qibud.eventstore.DomainEvent;
import org.qibud.eventstore.DomainEventAttachment;
import org.qibud.eventstore.DomainEventFactory;
import org.qibud.eventstore.DomainEventsSequence;
import org.qibud.eventstore.DomainEventsSequenceBuilder;
import org.qibud.eventstore.EventStore;
import org.qibud.eventstore.EventStreamListener;
import static org.junit.Assert.*;

/**
 * Contract test for {@link EventStore} implementations: subclasses provide a
 * concrete store via {@link #newEventStore()} and inherit the scenarios below
 * (store / listen / backup / restore / replay, and attachments).
 */
public abstract class AbstractEventStoreTest
{

    /** Supplies the EventStore implementation under test. */
    protected abstract EventStore newEventStore()
            throws Exception;

    // Marker event types used only to name the recorded domain events.
    interface FirstEvent
    {
    }

    interface SecondEvent
    {
    }

    interface ThirdEvent
    {
    }

    interface FourthEvent
    {
    }

    interface MyEvent
    {
    }

    @Test
    public void globalTestWithoutAttachments()
            throws Exception
    {
        EventStore eventStore = null;
        try {
            eventStore = newEventStore();
            DomainEventFactory eventFactory = new DomainEventFactory();
            System.out.println( " >> EVENT STORE TEST :: GLOBAL -----------------------------------------------------" );
            Map<String, String> testData = new HashMap<String, String>();
            testData.put( "foo", "bar" );

            // Record every stored sequence so listener notifications can be asserted.
            final List<DomainEventsSequence> listenedEvents = new ArrayList<DomainEventsSequence>();
            eventStore.registerEventStreamListener( new EventStreamListener()
            {

                @Override
                public void onDomainEventsSequence( DomainEventsSequence events )
                {
                    listenedEvents.add( events );
                }

            } );

            DomainEventsSequence events = new DomainEventsSequenceBuilder().withUsecase( "EventStore unit test" ).
                    withUser( "Build System" ).
                    withEvents( eventFactory.newDomainEvent( FirstEvent.class, testData ),
                                eventFactory.newDomainEvent( SecondEvent.class, testData ) ).
                    build();
            eventStore.storeEvents( events );

            assertEquals( "EventStreamListener", 1, listenedEvents.size() );
            assertEquals( "ListenedEventsData", 1, listenedEvents.get( 0 ).events().get( 0 ).data().length() );
            assertEquals( "EventStoreCount", 1, eventStore.count() );
            assertEquals( "EventStoreFetch", 1, eventStore.eventsSequences( 0, Integer.MAX_VALUE ).size() );

            events = new DomainEventsSequenceBuilder().withUsecase( "EventStore unit test" ).
                    withUser( "Build System" ).
                    withEvents( eventFactory.newDomainEvent( ThirdEvent.class, testData ),
                                eventFactory.newDomainEvent( FourthEvent.class, testData ) ).
                    build();
            eventStore.storeEvents( events );

            assertEquals( "EventStreamListener", 2, listenedEvents.size() );
            assertEquals( "EventStoreCount", 2, eventStore.count() );
            assertEquals( "EventStoreFetch", 2, eventStore.eventsSequences( 0, Integer.MAX_VALUE ).size() );

            // Restoring a backup appends to the store: count doubles, the
            // listener is NOT notified for restored sequences.
            StringWriter backupWriter = new StringWriter();
            eventStore.backup( backupWriter );
            String backup = backupWriter.toString();
            System.out.println( backup );
            eventStore.restore( new StringReader( backup ) );

            assertEquals( "EventStreamListener", 2, listenedEvents.size() );
            assertEquals( "EventStoreCount", 4, eventStore.count() );
            assertEquals( "EventStoreFetch", 4, eventStore.eventsSequences( 0, Integer.MAX_VALUE ).size() );

            // Replay of the restored half notifies the listener again.
            eventStore.replay( 2, Integer.MAX_VALUE );
            assertEquals( "EventStreamListener", 4, listenedEvents.size() );

            // clear + restore + full replay: 4 sequences replayed on top of
            // the 4 notifications already seen.
            backupWriter = new StringWriter();
            eventStore.backup( backupWriter );
            backup = backupWriter.toString();
            System.out.println( backup );
            eventStore.clear();
            eventStore.restore( new StringReader( backup ) );
            eventStore.replay( 0, Integer.MAX_VALUE );

            assertEquals( "EventStreamListener", 8, listenedEvents.size() );
            assertEquals( "EventStoreCount", 4, eventStore.count() );
            assertEquals( "EventStoreFetch", 4, eventStore.eventsSequences( 0, Integer.MAX_VALUE ).size() );
        } finally {
            if ( eventStore != null ) {
                eventStore.clear();
            }
        }
    }

    @Test
    public void testAttachments()
            throws Exception
    {
        EventStore eventStore = null;
        try {
            eventStore = newEventStore();
            DomainEventFactory eventFactory = new DomainEventFactory();
            System.out.println( " >> EVENT STORE TEST :: ATTACHMENTS ------------------------------------------------" );
            Map<String, String> testData = new HashMap<String, String>();
            testData.put( "foo", "bar" );

            DomainEventAttachment attachment = eventFactory.newDomainEventAttachment( new DomainEventAttachment.DataProvider()
            {

                @Override
                public InputStream data()
                        throws IOException
                {
                    // FIX: was the deprecated StringBufferInputStream, which drops
                    // the high byte of each char; use an explicit UTF-8 encoding.
                    return new ByteArrayInputStream( "TEST DATA".getBytes( StandardCharsets.UTF_8 ) );
                }

            } );
            DomainEvent event = eventFactory.newDomainEvent( MyEvent.class, testData );
            event.data().put( "single-attachment", attachment.localIdentity() );
            DomainEventsSequence sequence = new DomainEventsSequenceBuilder().withUsecase( "EventStore attachments test" ).
                    withUser( "Build System" ).
                    withEvents( event ).
                    withAttachments( attachment ).build();

            eventStore.storeEvents( sequence );

            StringWriter backupWriter = new StringWriter();
            eventStore.backup( backupWriter );
            String backup = backupWriter.toString();
            System.out.println( backup );

            // Fetch the stored sequence back and verify the attachment
            // round-trips byte-for-byte.
            DomainEventsSequence fetchedSequence = eventStore.eventsSequence( 0 );
            assertFalse( "fetched sequence attachments empty", fetchedSequence.attachments().isEmpty() );
            assertEquals( "fetched sequence attachments size", 1, fetchedSequence.attachments().size() );

            DomainEventAttachment fetchedAttachment = fetchedSequence.attachments().get( 0 );
            StringWriter writer = new StringWriter();
            InputStreamReader reader = new InputStreamReader( fetchedAttachment.data() );
            IO.copy( reader, writer );
            String readAttachment = writer.toString();
            assertEquals( "fetched attachment data is correct", "TEST DATA", readAttachment );
        } finally {
            if ( eventStore != null ) {
                eventStore.clear();
            }
        }
    }

}
//
//   Copyright 2016  Cityzen Data
//
//   Licensed under the Apache License, Version 2.0 (the "License");
//   you may not use this file except in compliance with the License.
//   You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
//   Unless required by applicable law or agreed to in writing, software
//   distributed under the License is distributed on an "AS IS" BASIS,
//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//   See the License for the specific language governing permissions and
//   limitations under the License.
//

package io.warp10.script.aggregator;

import io.warp10.continuum.gts.GeoTimeSerie;
import io.warp10.script.NamedWarpScriptFunction;
import io.warp10.script.WarpScriptAggregatorFunction;
import io.warp10.script.WarpScriptBucketizerFunction;
import io.warp10.script.WarpScriptMapperFunction;
import io.warp10.script.WarpScriptReducerFunction;
import io.warp10.script.WarpScriptException;

import java.util.Arrays;
import java.util.ArrayList;
import java.util.List;

import com.geoxp.GeoXPLib;

/**
 * Return the Median Absolute Deviation
 * @see https://en.wikipedia.org/wiki/Median_absolute_deviation
 *
 * The returned location will be the median of all locations.
 * The returned elevation will be the median of all elevations.
 */
public class MAD extends NamedWarpScriptFunction implements WarpScriptAggregatorFunction, WarpScriptMapperFunction, WarpScriptBucketizerFunction, WarpScriptReducerFunction {

  public MAD(String name) {
    super(name);
  }

  /**
   * Computes the Median Absolute Deviation of the values, along with the median
   * of the locations and the median of the elevations.
   *
   * @param args standard aggregator frame: args[0] = tick, args[4] = locations,
   *             args[5] = elevations, args[6] = values
   * @return { tick, medianLocation, medianElevation, mad }
   * @throws WarpScriptException if a median must be interpolated between two
   *         values which are not both Long or both Double
   */
  @Override
  public Object apply(Object[] args) throws WarpScriptException {
    long tick = (long) args[0];
    long[] locations = (long[]) args[4];
    long[] elevations = (long[]) args[5];
    Object[] values = (Object[]) args[6];

    //
    // Sort locations and elevations so medians can be read off directly.
    // Note: this mutates the caller supplied arrays, as the original code did.
    //

    Arrays.sort(locations);
    Arrays.sort(elevations);

    long location = GeoTimeSerie.NO_LOCATION;
    long elevation = GeoTimeSerie.NO_ELEVATION;

    //
    // Median of locations.
    //

    if (locations.length > 0) {
      if (locations[0] == locations[locations.length - 1]) {
        // All locations are identical (possibly all NO_LOCATION), use that value.
        location = locations[0];
      } else {
        // Strip the NO_LOCATION sentinel values before computing the median.
        // FIX(review): the previous code only removed sentinels at/after the
        // index returned by binarySearch (which gives no guarantee which
        // duplicate it returns) and failed to shrink 'len' when the run of
        // sentinels extended to the end of the array, so sentinels could be
        // counted in the median.
        int len = stripSentinel(locations, GeoTimeSerie.NO_LOCATION);

        if (len > 0) {
          if (0 == len % 2) {
            // Even count: midpoint of the two central cells in lat/lon space.
            double[] high = GeoXPLib.fromGeoXPPoint(locations[len / 2]);
            double[] low = GeoXPLib.fromGeoXPPoint(locations[(len / 2) - 1]);
            location = GeoXPLib.toGeoXPPoint((high[0] + low[0]) / 2.0D, (high[1] + low[1]) / 2.0D);
          } else {
            location = locations[len / 2];
          }
        }
      }
    }

    //
    // Median of elevations, same scheme as locations but with a plain
    // arithmetic mean for the even case.
    //

    if (elevations.length > 0) {
      if (elevations[0] == elevations[elevations.length - 1]) {
        elevation = elevations[0];
      } else {
        int len = stripSentinel(elevations, GeoTimeSerie.NO_ELEVATION);

        if (len > 0) {
          if (0 == len % 2) {
            elevation = (elevations[len / 2] + elevations[(len / 2) - 1]) / 2L;
          } else {
            elevation = elevations[len / 2];
          }
        }
      }
    }

    //
    // Remove null values. Fast path: no copy when there are no nulls
    // (mapper or bucketizer use).
    //

    for (int i = 0; i < values.length; i++) {
      if (null == values[i]) {
        // A null was detected, rebuild an array without nulls and stop scanning.
        List<Object> nonnullvalues = new ArrayList<Object>(values.length - 1);
        for (int n = 0; n < values.length; n++) {
          if (null != values[n]) {
            nonnullvalues.add(values[n]);
          }
        }
        values = nonnullvalues.toArray();
        break;
      }
    }

    Arrays.sort(values);

    // Median of the values (null when there are no values).
    Object median = medianOfSorted(values, "Unable to compute median on an even number of non numeric values.");

    //
    // Compute the absolute residuals against the median.
    //

    Object[] residuals = new Object[values.length];

    for (int i = 0; i < values.length; i++) {
      if (median instanceof Long) {
        residuals[i] = Math.abs((long) median - (long) values[i]);
      } else {
        residuals[i] = Math.abs((double) median - (double) values[i]);
      }
    }

    Arrays.sort(residuals);

    // The MAD is the median of the residuals.
    Object mad = medianOfSorted(residuals, "Unable to compute median absolute deviation on an even number of non numeric values.");

    return new Object[] { tick, location, elevation, mad };
  }

  /**
   * Removes the run of 'sentinel' values from a sorted array by compacting the
   * remaining elements to the front of the array.
   *
   * @param sorted array sorted in ascending order, possibly containing sentinels
   * @param sentinel value to remove
   * @return the logical length of the array once sentinels are removed
   */
  private static int stripSentinel(long[] sorted, long sentinel) {
    int idx = Arrays.binarySearch(sorted, sentinel);

    if (idx < 0) {
      // Sentinel not present, nothing to remove.
      return sorted.length;
    }

    // binarySearch may land anywhere within a run of duplicates, so expand to
    // the full [first..last] range of sentinel values.
    int first = idx;
    while (first > 0 && sentinel == sorted[first - 1]) {
      first--;
    }
    int last = idx;
    while (last < sorted.length - 1 && sentinel == sorted[last + 1]) {
      last++;
    }

    // Shift the elements following the sentinel run over it (no-op when the
    // run is at the end of the array).
    System.arraycopy(sorted, last + 1, sorted, first, sorted.length - last - 1);

    return sorted.length - (last - first + 1);
  }

  /**
   * Median of a sorted array of homogeneous Long or Double boxed values.
   *
   * @param sorted sorted, null free values
   * @param message error message used when the two central values of an even
   *        sized array are not both Long or both Double
   * @return the median, or null if the array is empty
   */
  private static Object medianOfSorted(Object[] sorted, String message) throws WarpScriptException {
    if (0 == sorted.length) {
      return null;
    }

    if (sorted[0].equals(sorted[sorted.length - 1])) {
      // All values identical, use the first one.
      return sorted[0];
    }

    int len = sorted.length;

    if (0 != len % 2) {
      return sorted[len / 2];
    }

    // Even count: interpolate between the two central values.
    Object low = sorted[(len / 2) - 1];
    Object high = sorted[len / 2];

    if (low instanceof Long && high instanceof Long) {
      return ((long) low + (long) high) / 2L;
    } else if (low instanceof Double && high instanceof Double) {
      return ((double) low + (double) high) / 2.0D;
    } else {
      throw new WarpScriptException(message);
    }
  }
}
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.facebook.buck.android;

import com.facebook.buck.android.apkmodule.APKModule;
import com.facebook.buck.android.exopackage.ExopackageMode;
import com.facebook.buck.android.redex.ReDexStep;
import com.facebook.buck.android.redex.RedexOptions;
import com.facebook.buck.android.toolchain.AndroidPlatformTarget;
import com.facebook.buck.android.toolchain.AndroidSdkLocation;
import com.facebook.buck.core.build.buildable.context.BuildableContext;
import com.facebook.buck.core.build.context.BuildContext;
import com.facebook.buck.core.build.execution.context.ExecutionContext;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.impl.BuildTargetPaths;
import com.facebook.buck.core.rulekey.AddToRuleKey;
import com.facebook.buck.core.rulekey.AddsToRuleKey;
import com.facebook.buck.core.sourcepath.SourcePath;
import com.facebook.buck.core.sourcepath.resolver.SourcePathResolverAdapter;
import com.facebook.buck.core.toolchain.tool.Tool;
import com.facebook.buck.io.BuildCellRelativePath;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.step.AbstractExecutionStep;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.StepExecutionResult;
import com.facebook.buck.step.StepExecutionResults;
import com.facebook.buck.step.fs.CopyStep;
import com.facebook.buck.step.fs.MakeCleanDirectoryStep;
import com.facebook.buck.step.fs.MkdirStep;
import com.facebook.buck.unarchive.UnzipStep;
import com.facebook.buck.util.MoreSuppliers;
import com.facebook.buck.util.stream.RichStream;
import com.facebook.buck.zip.RepackZipEntriesStep;
import com.facebook.buck.zip.ZipScrubberStep;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.io.Files;
import java.io.File;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.EnumSet;
import java.util.Optional;
import java.util.function.Supplier;

/**
 * Assembles, signs and zipaligns the final .apk (via {@code ApkBuilderStep}) or .aab (via {@code
 * AabBuilderStep}), optionally compressing resources and running redex along the way.
 */
class AndroidBinaryBuildable implements AddsToRuleKey {

  /**
   * This is the path from the root of the APK that should contain the metadata.txt and
   * secondary-N.dex.jar files for secondary dexes.
   */
  static final String SMART_DEX_SECONDARY_DEX_SUBDIR =
      "assets/smart-dex-secondary-program-dex-jars";

  @AddToRuleKey private final EnumSet<ExopackageMode> exopackageModes;
  @AddToRuleKey private final SourcePath androidManifestPath;
  @AddToRuleKey private final DexFilesInfo dexFilesInfo;
  @AddToRuleKey private final NativeFilesInfo nativeFilesInfo;
  @AddToRuleKey private final ResourceFilesInfo resourceFilesInfo;
  @AddToRuleKey final boolean packageAssetLibraries;
  @AddToRuleKey final boolean compressAssetLibraries;
  @AddToRuleKey final Optional<CompressionAlgorithm> assetCompressionAlgorithm;
  @AddToRuleKey private final Optional<RedexOptions> redexOptions;

  @AddToRuleKey
  // Redex accesses some files that are indirectly referenced through the proguard command-line.txt.
  // TODO(cjhopman): Redex shouldn't do that, or this list should be constructed more carefully.
  private final ImmutableList<SourcePath> additionalRedexInputs;

  @AddToRuleKey private final int xzCompressionLevel;
  @AddToRuleKey private final SourcePath keystorePath;
  @AddToRuleKey private final SourcePath keystorePropertiesPath;
  @AddToRuleKey private final ImmutableSortedSet<APKModule> apkModules;

  // The java launcher is used for ApkBuilder.
  @AddToRuleKey private final Tool javaRuntimeLauncher;

  // Post-process resource compression
  @AddToRuleKey private final boolean isCompressResources;

  @AddToRuleKey private final ImmutableMap<APKModule, SourcePath> moduleResourceApkPaths;

  private final boolean isApk;

  // Path to Bundles config file
  @AddToRuleKey private final Optional<SourcePath> bundleConfigFilePath;

  // These should be the only things not added to the rulekey.
  private final ProjectFilesystem filesystem;
  private final BuildTarget buildTarget;
  private final AndroidSdkLocation androidSdkLocation;
  private final AndroidPlatformTarget androidPlatformTarget;

  AndroidBinaryBuildable(
      BuildTarget buildTarget,
      ProjectFilesystem filesystem,
      AndroidSdkLocation androidSdkLocation,
      AndroidPlatformTarget androidPlatformTarget,
      SourcePath keystorePath,
      SourcePath keystorePropertiesPath,
      Optional<RedexOptions> redexOptions,
      ImmutableList<SourcePath> additionalRedexInputs,
      EnumSet<ExopackageMode> exopackageModes,
      int xzCompressionLevel,
      boolean packageAssetLibraries,
      boolean compressAssetLibraries,
      Optional<CompressionAlgorithm> assetCompressionAlgorithm,
      Tool javaRuntimeLauncher,
      SourcePath androidManifestPath,
      boolean isCompressResources,
      DexFilesInfo dexFilesInfo,
      NativeFilesInfo nativeFilesInfo,
      ResourceFilesInfo resourceFilesInfo,
      ImmutableSortedSet<APKModule> apkModules,
      ImmutableMap<APKModule, SourcePath> moduleResourceApkPaths,
      Optional<SourcePath> bundleConfigFilePath,
      boolean isApk) {
    this.filesystem = filesystem;
    this.buildTarget = buildTarget;
    this.androidSdkLocation = androidSdkLocation;
    this.androidPlatformTarget = androidPlatformTarget;
    this.keystorePath = keystorePath;
    this.keystorePropertiesPath = keystorePropertiesPath;
    this.redexOptions = redexOptions;
    this.additionalRedexInputs = additionalRedexInputs;
    this.exopackageModes = exopackageModes;
    this.xzCompressionLevel = xzCompressionLevel;
    this.javaRuntimeLauncher = javaRuntimeLauncher;
    this.androidManifestPath = androidManifestPath;
    this.isCompressResources = isCompressResources;
    this.apkModules = apkModules;
    this.moduleResourceApkPaths = moduleResourceApkPaths;
    this.dexFilesInfo = dexFilesInfo;
    this.nativeFilesInfo = nativeFilesInfo;
    this.packageAssetLibraries = packageAssetLibraries;
    this.compressAssetLibraries = compressAssetLibraries;
    this.assetCompressionAlgorithm = assetCompressionAlgorithm;
    this.resourceFilesInfo = resourceFilesInfo;
    this.bundleConfigFilePath = bundleConfigFilePath;
    this.isApk = isApk;
  }

  /**
   * Produces the ordered list of build steps that copy the manifest, gather per-module dex/native
   * lib/resource inputs, run ApkBuilder or AabBuilder, scrub timestamps, and optionally compress
   * resources, redex, zipalign and sign the output.
   */
  @SuppressWarnings("PMD.PrematureDeclaration")
  public ImmutableList<Step> getBuildSteps(
      BuildContext context, BuildableContext buildableContext) {
    SourcePathResolverAdapter pathResolver = context.getSourcePathResolver();
    ImmutableList.Builder<Step> steps = ImmutableList.builder();

    // The `HasInstallableApk` interface needs access to the manifest, so make sure we create our
    // own copy of this so that we don't have a runtime dep on the `AaptPackageResources` step.
    Path manifestPath = getManifestPath();
    steps.add(
        MkdirStep.of(
            BuildCellRelativePath.fromCellRelativePath(
                context.getBuildCellRootPath(), getProjectFilesystem(), manifestPath.getParent())));
    steps.add(
        CopyStep.forFile(
            getProjectFilesystem(),
            pathResolver.getRelativePath(androidManifestPath),
            manifestPath));
    buildableContext.recordArtifact(manifestPath);

    dexFilesInfo.proguardTextFilesPath.ifPresent(
        path -> {
          steps.add(createCopyProguardFilesStep(pathResolver, path));
        });

    ImmutableSet.Builder<Path> nativeLibraryDirectoriesBuilder = ImmutableSet.builder();
    // Copy the transitive closure of native-libs-as-assets to a single directory, if any.
    ImmutableSet.Builder<Path> nativeLibraryAsAssetDirectories = ImmutableSet.builder();
    ImmutableSet.Builder<Path> moduleResourcesDirectories = ImmutableSet.builder();
    ImmutableSet.Builder<ModuleInfo> modulesInfo = ImmutableSet.builder();

    ImmutableMap<String, SourcePath> mapOfModuleToSecondaryDexSourcePaths =
        dexFilesInfo.getMapOfModuleToSecondaryDexSourcePaths();
    ImmutableModuleInfo.Builder baseModuleInfo = ImmutableModuleInfo.builder();
    baseModuleInfo.setModuleName("base");

    // Gather dex/native/resource inputs for every module.
    for (APKModule module : apkModules) {
      processModule(
          module,
          nativeLibraryDirectoriesBuilder,
          nativeLibraryAsAssetDirectories,
          moduleResourcesDirectories,
          steps,
          pathResolver,
          context,
          mapOfModuleToSecondaryDexSourcePaths,
          baseModuleInfo,
          modulesInfo);
    }

    // If non-english strings are to be stored as assets, pass them to ApkBuilder.
    ImmutableSet.Builder<Path> zipFiles = ImmutableSet.builder();
    RichStream.from(resourceFilesInfo.primaryApkAssetsZips)
        .map(pathResolver::getRelativePath)
        .forEach(zipFiles::add);

    if (ExopackageMode.enabledForNativeLibraries(exopackageModes)
        && !ExopackageMode.enabledForArch64(exopackageModes)) {
      // We need to include a few dummy native libraries with our application so that Android knows
      // to run it as 32-bit. Android defaults to 64-bit when no libraries are provided at all,
      // causing us to fail to load our 32-bit exopackage native libraries later.
      String fakeNativeLibraryBundle = System.getProperty("buck.native_exopackage_fake_path");

      if (fakeNativeLibraryBundle == null) {
        throw new RuntimeException("fake native bundle not specified in properties");
      }

      zipFiles.add(Paths.get(fakeNativeLibraryBundle));
    }

    ImmutableSet<Path> allAssetDirectories =
        ImmutableSet.<Path>builder()
            .addAll(moduleResourcesDirectories.build())
            .addAll(nativeLibraryAsAssetDirectories.build())
            .addAll(dexFilesInfo.getSecondaryDexDirs(getProjectFilesystem(), pathResolver))
            .build();

    SourcePathResolverAdapter resolver = context.getSourcePathResolver();
    Path signedApkPath = getSignedApkPath();
    Path pathToKeystore = resolver.getAbsolutePath(keystorePath);
    Supplier<KeystoreProperties> keystoreProperties =
        getKeystorePropertiesSupplier(resolver, pathToKeystore);

    ImmutableSet<Path> thirdPartyJars =
        resourceFilesInfo.pathsToThirdPartyJars.stream()
            .map(resolver::getAbsolutePath)
            .collect(ImmutableSet.toImmutableSet());

    if (isApk) {
      steps.add(
          new ApkBuilderStep(
              getProjectFilesystem(),
              pathResolver.getAbsolutePath(resourceFilesInfo.resourcesApkPath),
              getSignedApkPath(),
              pathResolver.getRelativePath(dexFilesInfo.primaryDexPath),
              allAssetDirectories,
              nativeLibraryDirectoriesBuilder.build(),
              zipFiles.build(),
              thirdPartyJars,
              pathToKeystore,
              keystoreProperties,
              false,
              javaRuntimeLauncher.getCommandPrefix(pathResolver)));
    } else {
      ImmutableSet<String> moduleNames =
          apkModules.stream().map(APKModule::getName).collect(ImmutableSet.toImmutableSet());
      // Fold any secondary-dex asset directories that do not belong to a named module into
      // the base module.
      for (Path path : dexFilesInfo.getSecondaryDexDirs(getProjectFilesystem(), pathResolver)) {
        if (path.getFileName().toString().equals("additional_dexes")) {
          File[] assetFiles = path.toFile().listFiles();
          if (assetFiles == null) {
            continue;
          }
          for (File assetFile : assetFiles) {
            if (!assetFile.getName().equals("assets")) {
              continue;
            }
            File[] modules = assetFile.listFiles();
            if (modules == null) {
              continue;
            }
            for (File module : modules) {
              if (moduleNames.contains(module.getName())) {
                continue;
              }
              baseModuleInfo.putAssetDirectories(module.toPath(), "assets");
            }
          }
        } else {
          baseModuleInfo.putAssetDirectories(path, "");
        }
      }

      baseModuleInfo
          .setResourceApk(pathResolver.getAbsolutePath(resourceFilesInfo.resourcesApkPath))
          .addDexFile(pathResolver.getRelativePath(dexFilesInfo.primaryDexPath))
          .setJarFilesThatMayContainResources(thirdPartyJars)
          .setZipFiles(zipFiles.build());
      modulesInfo.add(baseModuleInfo.build());

      Optional<Path> bundleConfigPath = bundleConfigFilePath.map(pathResolver::getAbsolutePath);
      steps.add(
          new AabBuilderStep(
              getProjectFilesystem(),
              getSignedApkPath(),
              bundleConfigPath,
              buildTarget,
              false,
              modulesInfo.build()));
    }

    // The `ApkBuilderStep` delegates to android tools to build a ZIP with timestamps in it, making
    // the output non-deterministic. So use an additional scrubbing step to zero these out.
    steps.add(ZipScrubberStep.of(getProjectFilesystem().resolve(signedApkPath)));

    Path apkToRedexAndAlign;
    // Optionally, compress the resources file in the .apk.
    if (isCompressResources) {
      Path compressedApkPath = getCompressedResourcesApkPath();
      apkToRedexAndAlign = compressedApkPath;
      steps.add(createRepackZipEntriesStep(signedApkPath, compressedApkPath));
    } else {
      apkToRedexAndAlign = signedApkPath;
    }

    boolean applyRedex = redexOptions.isPresent();
    Path apkToAlign = apkToRedexAndAlign;
    Path v2SignedApkPath = getFinalApkPath();

    if (applyRedex) {
      Path redexedApk = getRedexedApkPath();
      apkToAlign = redexedApk;
      steps.addAll(
          createRedexSteps(
              context,
              buildableContext,
              resolver,
              keystoreProperties,
              apkToRedexAndAlign,
              redexedApk));
    }

    if (isApk) {
      Path zipalignedApkPath = getZipalignedApkPath();
      steps.add(
          new ZipalignStep(
              getProjectFilesystem().getRootPath(),
              androidPlatformTarget,
              apkToAlign,
              zipalignedApkPath));
      steps.add(
          new ApkSignerStep(
              getProjectFilesystem(),
              zipalignedApkPath,
              v2SignedApkPath,
              keystoreProperties,
              applyRedex));
    } else {
      steps.add(
          new ZipalignStep(
              getProjectFilesystem().getRootPath(),
              androidPlatformTarget,
              apkToAlign,
              v2SignedApkPath));
    }
    buildableContext.recordArtifact(v2SignedApkPath);
    return steps.build();
  }

  /**
   * Collects a single module's dex, native library and resource directories into the shared
   * builders, and (for .aab builds) records per-module {@code ModuleInfo}.
   */
  private void processModule(
      APKModule module,
      ImmutableSet.Builder<Path> nativeLibraryDirectoriesBuilder,
      ImmutableSet.Builder<Path> nativeLibraryAsAssetDirectories,
      ImmutableSet.Builder<Path> moduleResourcesDirectories,
      ImmutableList.Builder<Step> steps,
      SourcePathResolverAdapter pathResolver,
      BuildContext context,
      ImmutableMap<String, SourcePath> mapOfModuleToSecondaryDexSourcePaths,
      ImmutableModuleInfo.Builder baseModuleInfo,
      ImmutableSet.Builder<ModuleInfo> modulesInfo) {
    boolean addThisModule = false;
    ImmutableMap.Builder<Path, String> assetDirectoriesBuilderForThisModule =
        ImmutableMap.builder();
    ImmutableSet.Builder<Path> nativeLibraryDirectoriesBuilderForThisModule =
        ImmutableSet.builder();
    Path resourcesDirectoryForThisModule = null;
    ImmutableSet.Builder<Path> dexFileDirectoriesBuilderForThisModule = ImmutableSet.builder();

    if (mapOfModuleToSecondaryDexSourcePaths.containsKey(module.getName())) {
      addDexFileDirectories(
          pathResolver,
          module,
          mapOfModuleToSecondaryDexSourcePaths,
          dexFileDirectoriesBuilderForThisModule,
          assetDirectoriesBuilderForThisModule);
    }

    boolean shouldPackageAssetLibraries = packageAssetLibraries || !module.isRootModule();

    if (!ExopackageMode.enabledForNativeLibraries(exopackageModes)
        && nativeFilesInfo.nativeLibsDirs.isPresent()
        && nativeFilesInfo.nativeLibsDirs.get().containsKey(module)) {
      addThisModule = true;
      addNativeDirectory(
          shouldPackageAssetLibraries,
          module,
          pathResolver,
          nativeLibraryDirectoriesBuilder,
          nativeLibraryDirectoriesBuilderForThisModule);
    }

    // Package prebuilt libs which need to be loaded by `System.loadLibrary` in the standard dir,
    // even in exopackage builds.
    if (nativeFilesInfo.nativeLibsDirForSystemLoader.isPresent() && module.isRootModule()) {
      addThisModule = true;
      Path relativePath =
          pathResolver.getRelativePath(nativeFilesInfo.nativeLibsDirForSystemLoader.get());
      nativeLibraryDirectoriesBuilder.add(relativePath);
      nativeLibraryDirectoriesBuilderForThisModule.add(relativePath);
    }

    if (shouldPackageAssetLibraries) {
      addThisModule = true;
      addNativeLibraryAsAssetDirectory(
          module,
          context,
          nativeLibraryAsAssetDirectories,
          assetDirectoriesBuilderForThisModule,
          steps);
    }

    if (moduleResourceApkPaths.get(module) != null) {
      addThisModule = true;
      resourcesDirectoryForThisModule =
          addModuleResourceDirectory(module, context, moduleResourcesDirectories, steps);
    }

    // ModuleInfo is only needed for .aab builds.
    if (!addThisModule || isApk) {
      return;
    }

    if (module.isRootModule()) {
      baseModuleInfo
          .putAllAssetDirectories(assetDirectoriesBuilderForThisModule.build())
          .addAllNativeLibraryDirectories(nativeLibraryDirectoriesBuilderForThisModule.build())
          .addAllDexFile(dexFileDirectoriesBuilderForThisModule.build());
    } else {
      String moduleName = module.getName();
      modulesInfo.add(
          ImmutableModuleInfo.of(
              moduleName,
              resourcesDirectoryForThisModule,
              dexFileDirectoriesBuilderForThisModule.build(),
              assetDirectoriesBuilderForThisModule.build(),
              nativeLibraryDirectoriesBuilderForThisModule.build(),
              ImmutableSet.<Path>builder().build(),
              ImmutableSet.<Path>builder().build()));
    }
  }

  /**
   * Registers each secondary .dex file of the module as a dex input; non-.dex entries are treated
   * as asset directories keyed by their path prefix.
   */
  private void addDexFileDirectories(
      SourcePathResolverAdapter pathResolver,
      APKModule module,
      ImmutableMap<String, SourcePath> mapOfModuleToSecondaryDexSourcePaths,
      ImmutableSet.Builder<Path> dexFileDirectoriesBuilderForThisModule,
      ImmutableMap.Builder<Path, String> assetDirectoriesBuilderForThisModule) {
    File[] dexFiles =
        filesystem
            .getPathForRelativePath(
                pathResolver.getRelativePath(
                    mapOfModuleToSecondaryDexSourcePaths.get(module.getName())))
            .toFile()
            .listFiles();
    if (dexFiles == null) {
      return;
    }
    for (File dexFile : dexFiles) {
      if (dexFile.getName().endsWith(".dex")) {
        dexFileDirectoriesBuilderForThisModule.add(
            filesystem.getPathForRelativePath(dexFile.toPath()));
      } else {
        Path current =
            pathResolver.getRelativePath(
                mapOfModuleToSecondaryDexSourcePaths.get(module.getName()));
        String prefix = current.getParent().getParent().relativize(current).toString();
        assetDirectoriesBuilderForThisModule.put(current, prefix);
      }
    }
  }

  /**
   * Adds the module's native lib dirs (and, when asset libraries are NOT packaged separately, its
   * native-libs-as-assets dirs) to both the global and per-module builders.
   */
  private void addNativeDirectory(
      boolean shouldPackageAssetLibraries,
      APKModule module,
      SourcePathResolverAdapter pathResolver,
      ImmutableSet.Builder<Path> nativeLibraryDirectoriesBuilder,
      ImmutableSet.Builder<Path> nativeLibraryDirectoriesBuilderForThisModule) {
    nativeLibraryDirectoriesBuilder.add(
        pathResolver.getRelativePath(nativeFilesInfo.nativeLibsDirs.get().get(module)));
    nativeLibraryDirectoriesBuilderForThisModule.add(
        pathResolver.getRelativePath(nativeFilesInfo.nativeLibsDirs.get().get(module)));
    if (shouldPackageAssetLibraries) {
      return;
    }
    // NOTE(review): assumes nativeLibsAssetsDirs is present and contains this module when
    // asset libraries are not packaged -- verify against callers.
    nativeLibraryDirectoriesBuilder.add(
        pathResolver.getRelativePath(nativeFilesInfo.nativeLibsAssetsDirs.get().get(module)));
    nativeLibraryDirectoriesBuilderForThisModule.add(
        pathResolver.getRelativePath(nativeFilesInfo.nativeLibsAssetsDirs.get().get(module)));
  }

  /**
   * Stages the module's native libraries under an assets/ subdirectory and registers the staged
   * tree as an asset directory.
   */
  private void addNativeLibraryAsAssetDirectory(
      APKModule module,
      BuildContext context,
      ImmutableSet.Builder<Path> nativeLibraryAsAssetDirectories,
      ImmutableMap.Builder<Path, String> assetDirectoriesBuilderForThisModule,
      ImmutableList.Builder<Step> steps) {
    Preconditions.checkState(
        ExopackageMode.enabledForModules(exopackageModes)
            || !ExopackageMode.enabledForResources(exopackageModes));
    Path pathForNativeLibsAsAssets = getPathForNativeLibsAsAssets();
    Path libSubdirectory =
        pathForNativeLibsAsAssets
            .resolve("assets")
            .resolve(module.isRootModule() ? "lib" : module.getName());
    getStepsForNativeAssets(
        context,
        steps,
        libSubdirectory,
        module.isRootModule() ? "metadata.txt" : "libs.txt",
        module);
    nativeLibraryAsAssetDirectories.add(pathForNativeLibsAsAssets);
    assetDirectoriesBuilderForThisModule.put(pathForNativeLibsAsAssets, "");
  }

  /**
   * Unzips the module's resource APK into a scratch assets/&lt;module&gt; directory and returns
   * the unpack directory.
   */
  private Path addModuleResourceDirectory(
      APKModule module,
      BuildContext context,
      ImmutableSet.Builder<Path> moduleResourcesDirectories,
      ImmutableList.Builder<Step> steps) {
    SourcePath resourcePath = moduleResourceApkPaths.get(module);

    Path moduleResDirectory =
        BuildTargetPaths.getScratchPath(
            getProjectFilesystem(), buildTarget, "__module_res_" + module.getName() + "_%s__");

    Path unpackDirectory = moduleResDirectory.resolve("assets").resolve(module.getName());

    steps.addAll(
        MakeCleanDirectoryStep.of(
            BuildCellRelativePath.fromCellRelativePath(
                context.getBuildCellRootPath(), getProjectFilesystem(), unpackDirectory)));
    steps.add(
        new UnzipStep(
            getProjectFilesystem(),
            context.getSourcePathResolver().getAbsolutePath(resourcePath),
            unpackDirectory,
            Optional.empty()));

    moduleResourcesDirectories.add(moduleResDirectory);
    return unpackDirectory;
  }

  /**
   * Adds the steps that stage native-as-asset libraries for a module: clean the lib subdirectory,
   * copy asset libraries in, write the metadata file, and optionally concatenate + compress the
   * libraries into a single blob.
   */
  private void getStepsForNativeAssets(
      BuildContext context,
      ImmutableList.Builder<Step> steps,
      Path libSubdirectory,
      String metadataFilename,
      APKModule module) {
    steps.addAll(
        MakeCleanDirectoryStep.of(
            BuildCellRelativePath.fromCellRelativePath(
                context.getBuildCellRootPath(), getProjectFilesystem(), libSubdirectory)));

    // Input asset libraries are sorted in descending filesize order.
    ImmutableSortedSet.Builder<Path> inputAssetLibrariesBuilder =
        ImmutableSortedSet.orderedBy(
            (libPath1, libPath2) -> {
              try {
                ProjectFilesystem filesystem = getProjectFilesystem();
                int filesizeResult =
                    -Long.compare(
                        filesystem.getFileSize(libPath1), filesystem.getFileSize(libPath2));
                int pathnameResult = libPath1.compareTo(libPath2);
                return filesizeResult != 0 ? filesizeResult : pathnameResult;
              } catch (IOException e) {
                // Treat unreadable sizes as equal; ordering is only an optimization here.
                return 0;
              }
            });

    if (packageAssetLibraries || !module.isRootModule()) {
      // TODO(cjhopman): This block should probably all be handled by CopyNativeLibraries.
      // TODO(cjhopman): Why is this packaging native libs as assets even when native exopackage is
      // enabled?
      if (nativeFilesInfo.nativeLibsAssetsDirs.isPresent()
          && nativeFilesInfo.nativeLibsAssetsDirs.get().containsKey(module)) {
        // Copy in cxx libraries marked as assets. Filtering and renaming was already done
        // in CopyNativeLibraries.getBuildSteps().
        Path cxxNativeLibsSrc =
            context
                .getSourcePathResolver()
                .getRelativePath(nativeFilesInfo.nativeLibsAssetsDirs.get().get(module));
        steps.add(
            CopyStep.forDirectory(
                getProjectFilesystem(),
                cxxNativeLibsSrc,
                libSubdirectory,
                CopyStep.DirectoryMode.CONTENTS_ONLY));
      }

      // Step that populates a list of libraries and writes a metadata.txt to decompress.
      steps.add(
          createAssetLibrariesMetadataStep(
              libSubdirectory, metadataFilename, module, inputAssetLibrariesBuilder));
    }

    if (compressAssetLibraries || !module.isRootModule()) {
      ImmutableList.Builder<Path> outputAssetLibrariesBuilder = ImmutableList.builder();
      steps.add(
          createRenameAssetLibrariesStep(
              module, inputAssetLibrariesBuilder, outputAssetLibrariesBuilder));
      // Concat and xz compress.
      Path libOutputBlob = libSubdirectory.resolve("libraries.blob");
      steps.add(new ConcatStep(getProjectFilesystem(), outputAssetLibrariesBuilder, libOutputBlob));
      steps.add(
          CompressionAlgorithmCreator.createCompressionStep(
              assetCompressionAlgorithm.orElse(CompressionAlgorithm.XZ),
              getProjectFilesystem(),
              libOutputBlob,
              libSubdirectory,
              xzCompressionLevel));
    }
  }

  /** Renames each collected asset library to a "~"-suffixed temp file prior to concatenation. */
  private AbstractExecutionStep createRenameAssetLibrariesStep(
      APKModule module,
      ImmutableSortedSet.Builder<Path> inputAssetLibrariesBuilder,
      ImmutableList.Builder<Path> outputAssetLibrariesBuilder) {
    return new AbstractExecutionStep("rename_asset_libraries_as_temp_files_" + module.getName()) {
      @Override
      public StepExecutionResult execute(ExecutionContext context) throws IOException {
        ProjectFilesystem filesystem = getProjectFilesystem();
        for (Path libPath : inputAssetLibrariesBuilder.build()) {
          Path tempPath = libPath.resolveSibling(libPath.getFileName() + "~");
          filesystem.move(libPath, tempPath);
          outputAssetLibrariesBuilder.add(tempPath);
        }
        return StepExecutionResults.SUCCESS;
      }
    };
  }

  /**
   * Walks the staged lib directory (which must contain only .so files), collects the libraries,
   * and writes "path size sha256" metadata lines to the metadata file.
   */
  private AbstractExecutionStep createAssetLibrariesMetadataStep(
      Path libSubdirectory,
      String metadataFilename,
      APKModule module,
      ImmutableSortedSet.Builder<Path> inputAssetLibrariesBuilder) {
    return new AbstractExecutionStep("write_metadata_for_asset_libraries_" + module.getName()) {
      @Override
      public StepExecutionResult execute(ExecutionContext context) throws IOException {
        ProjectFilesystem filesystem = getProjectFilesystem();
        // Walk file tree to find libraries
        filesystem.walkRelativeFileTree(
            libSubdirectory,
            new SimpleFileVisitor<Path>() {
              @Override
              public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
                  throws IOException {
                if (!file.toString().endsWith(".so")) {
                  throw new IOException("unexpected file in lib directory");
                }
                inputAssetLibrariesBuilder.add(file);
                return FileVisitResult.CONTINUE;
              }
            });

        // Write a metadata
        ImmutableList.Builder<String> metadataLines = ImmutableList.builder();
        Path metadataOutput = libSubdirectory.resolve(metadataFilename);
        for (Path libPath : inputAssetLibrariesBuilder.build()) {
          // Should return something like x86/libfoo.so
          Path relativeLibPath = libSubdirectory.relativize(libPath);
          long filesize = filesystem.getFileSize(libPath);
          String desiredOutput = relativeLibPath.toString();
          String checksum = filesystem.computeSha256(libPath);
          metadataLines.add(desiredOutput + ' ' + filesize + ' ' + checksum);
        }
        ImmutableList<String> metadata = metadataLines.build();
        if (!metadata.isEmpty()) {
          filesystem.writeLinesToPath(metadata, metadataOutput);
        }
        return StepExecutionResults.SUCCESS;
      }
    };
  }

  /** Lazily (and at most once) loads the keystore properties from the properties file. */
  private Supplier<KeystoreProperties> getKeystorePropertiesSupplier(
      SourcePathResolverAdapter resolver, Path pathToKeystore) {
    return MoreSuppliers.memoize(
        () -> {
          try {
            return KeystoreProperties.createFromPropertiesFile(
                pathToKeystore,
                resolver.getAbsolutePath(keystorePropertiesPath),
                getProjectFilesystem());
          } catch (IOException e) {
            // FIX(review): previously threw a bare RuntimeException(), discarding the
            // cause; preserve it so keystore failures are diagnosable.
            throw new RuntimeException(e);
          }
        });
  }

  /** Recompresses resources.arsc inside the signed APK to produce the compressed variant. */
  private RepackZipEntriesStep createRepackZipEntriesStep(
      Path signedApkPath, Path compressedApkPath) {
    return new RepackZipEntriesStep(
        getProjectFilesystem(),
        signedApkPath,
        compressedApkPath,
        ImmutableSet.of("resources.arsc"));
  }

  /** Builds the steps that run redex over the (possibly compressed) APK. */
  private Iterable<Step> createRedexSteps(
      BuildContext context,
      BuildableContext buildableContext,
      SourcePathResolverAdapter resolver,
      Supplier<KeystoreProperties> keystoreProperties,
      Path apkToRedexAndAlign,
      Path redexedApk) {
    ImmutableList.Builder<Step> steps = ImmutableList.builder();
    Path proguardConfigDir = getProguardTextFilesPath();
    steps.add(
        MkdirStep.of(
            BuildCellRelativePath.fromCellRelativePath(
                context.getBuildCellRootPath(), getProjectFilesystem(), redexedApk.getParent())));
    ImmutableList<Step> redexSteps =
        ReDexStep.createSteps(
            getProjectFilesystem(),
            androidSdkLocation,
            resolver,
            redexOptions.get(),
            apkToRedexAndAlign,
            redexedApk,
            keystoreProperties,
            proguardConfigDir,
            buildableContext);
    steps.addAll(redexSteps);
    return steps.build();
  }

  /** Copies the proguard text files next to this rule's own output. */
  private CopyStep createCopyProguardFilesStep(
      SourcePathResolverAdapter pathResolver, SourcePath proguardTextFilesPath) {
    return CopyStep.forDirectory(
        getProjectFilesystem(),
        pathResolver.getRelativePath(proguardTextFilesPath),
        getProguardTextFilesPath(),
        CopyStep.DirectoryMode.CONTENTS_ONLY);
  }

  public ProjectFilesystem getProjectFilesystem() {
    return filesystem;
  }

  public BuildTarget getBuildTarget() {
    return buildTarget;
  }

  public Path getManifestPath() {
    return BuildTargetPaths.getGenPath(
        getProjectFilesystem(), getBuildTarget(), "%s/AndroidManifest.xml");
  }

  /** All native-libs-as-assets are copied to this directory before running apkbuilder. */
  private Path getPathForNativeLibsAsAssets() {
    return BuildTargetPaths.getScratchPath(
        getProjectFilesystem(), getBuildTarget(), "__native_libs_as_assets_%s__");
  }

  /** The APK at this path will be jar signed, but not zipaligned. */
  private Path getSignedApkPath() {
    return Paths.get(
        getUnsignedApkPath()
            .replaceAll("\\.unsigned\\.apk$", ".signed.apk")
            .replaceAll("\\.unsigned\\.aab$", ".signed.aab"));
  }

  /** The APK at this path will be zipaligned and jar signed. */
  private Path getZipalignedApkPath() {
    return Paths.get(
        getUnsignedApkPath()
            .replaceAll("\\.unsigned\\.apk$", ".zipaligned.apk")
            // FIX(review): was ".signed.aab" (copy-paste from getSignedApkPath), which would
            // collide with the signed path. Only the .apk branch is exercised today since this
            // method is called solely when isApk is true.
            .replaceAll("\\.unsigned\\.aab$", ".zipaligned.aab"));
  }

  /** The APK at this path will be zipaligned and v2 signed. */
  Path getFinalApkPath() {
    return Paths.get(
        getUnsignedApkPath()
            .replaceAll("\\.unsigned\\.apk$", ".apk")
            .replaceAll("\\.unsigned\\.aab$", ".aab"));
  }

  /** The APK at this path will have compressed resources, but will not be zipaligned. */
  private Path getCompressedResourcesApkPath() {
    return Paths.get(
        getUnsignedApkPath()
            .replaceAll("\\.unsigned\\.apk$", ".compressed.apk")
            .replaceAll("\\.unsigned\\.aab$", ".compressed.aab"));
  }

  private String getUnsignedApkPath() {
    return getPath("%s.unsigned." + getExtension()).toString();
  }

  private String getExtension() {
    return isApk ? "apk" : "aab";
  }

  private Path getPath(String format) {
    return BuildTargetPaths.getGenPath(getProjectFilesystem(), getBuildTarget(), format);
  }

  private Path getRedexedApkPath() {
    Path path = BuildTargetPaths.getGenPath(getProjectFilesystem(), getBuildTarget(), "%s__redex");
    return path.resolve(getBuildTarget().getShortName() + ".redex." + getExtension());
  }

  /**
   * Directory of text files used by proguard. Unforunately, this contains both inputs and outputs.
   */
  private Path getProguardTextFilesPath() {
    return BuildTargetPaths.getGenPath(getProjectFilesystem(), getBuildTarget(), "%s/proguard");
  }

  @VisibleForTesting
  static Path getProguardOutputFromInputClasspath(Path proguardConfigDir, Path classpathEntry) {
    // Hehe, this is so ridiculously fragile.
    Preconditions.checkArgument(
        !classpathEntry.isAbsolute(),
        "Classpath entries should be relative rather than absolute paths: %s",
        classpathEntry);
    String obfuscatedName =
        Files.getNameWithoutExtension(classpathEntry.toString()) + "-obfuscated.jar";
    Path dirName = classpathEntry.getParent();
    return proguardConfigDir.resolve(dirName).resolve(obfuscatedName);
  }
}
/*
 * Copyright 2019 OICR
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.dockstore.webservice.helpers.statelisteners;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.gson.Gson;
import io.dockstore.webservice.CustomWebApplicationException;
import io.dockstore.webservice.DockstoreWebserviceConfiguration;
import io.dockstore.webservice.core.AppTool;
import io.dockstore.webservice.core.BioWorkflow;
import io.dockstore.webservice.core.Category;
import io.dockstore.webservice.core.Entry;
import io.dockstore.webservice.core.Label;
import io.dockstore.webservice.core.Service;
import io.dockstore.webservice.core.SourceFile;
import io.dockstore.webservice.core.Tool;
import io.dockstore.webservice.core.User;
import io.dockstore.webservice.core.Version;
import io.dockstore.webservice.core.Workflow;
import io.dockstore.webservice.helpers.ElasticSearchHelper;
import io.dockstore.webservice.helpers.StateManagerMode;
import io.dropwizard.jackson.Jackson;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpStatus;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.xcontent.XContentType;
import org.hibernate.Hibernate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Formerly the ElasticManager, this listens for changes that might affect elastic search
 */
public class ElasticListener implements StateListenerInterface {
    public static DockstoreWebserviceConfiguration config;
    public static final String TOOLS_INDEX = "tools";
    public static final String WORKFLOWS_INDEX = "workflows";
    public static final String ALL_INDICES = "tools,workflows";
    private static final Logger LOGGER = LoggerFactory.getLogger(ElasticListener.class);
    private static final ObjectMapper MAPPER = Jackson.newObjectMapper();
    private static final String MAPPER_ERROR = "Could not convert Dockstore entry to Elasticsearch object";
    private DockstoreWebserviceConfiguration.ElasticSearchConfig elasticSearchConfig;

    @Override
    public void setConfig(DockstoreWebserviceConfiguration config) {
        this.elasticSearchConfig = config.getEsConfiguration();
    }

    /**
     * Manually eager load certain fields
     * @param entry
     */
    private void eagerLoadEntry(Entry entry) {
        Hibernate.initialize(entry.getAliases());
    }

    /**
     * Pushes a single entry's index document to Elasticsearch (upsert) or removes it (delete).
     * Checker workflows and services are filtered out and silently ignored.
     */
    @Override
    public void handleIndexUpdate(Entry entry, StateManagerMode command) {
        eagerLoadEntry(entry);
        entry = filterCheckerWorkflows(entry);
        // #2771 will need to disable this and properly create objects to get services into the index
        entry = entry instanceof Service ? null : entry;
        if (entry == null) {
            return;
        }
        // FIX: use SLF4J parameterized logging instead of string concatenation throughout.
        LOGGER.info("Performing index update with {}.", command);
        if (StringUtils.isEmpty(elasticSearchConfig.getHostname())) {
            LOGGER.error("No elastic search host found.");
            return;
        }
        if (!checkValid(entry, command)) {
            LOGGER.info("Could not perform the elastic search index update.");
            return;
        }
        try {
            RestHighLevelClient client = ElasticSearchHelper.restHighLevelClient();
            String entryType = entry instanceof Tool || entry instanceof AppTool ? TOOLS_INDEX : WORKFLOWS_INDEX;
            DocWriteResponse post;
            switch (command) {
            case PUBLISH:
            case UPDATE:
                UpdateRequest updateRequest = new UpdateRequest(entryType, String.valueOf(entry.getId()));
                String json = MAPPER.writeValueAsString(dockstoreEntryToElasticSearchObject(entry));
                // The below should've worked but it doesn't, the 2 lines after are used instead
                // updateRequest.upsert(json, XContentType.JSON);
                updateRequest.doc(json, XContentType.JSON);
                updateRequest.docAsUpsert(true);
                post = client.update(updateRequest, RequestOptions.DEFAULT);
                break;
            case DELETE:
                DeleteRequest deleteRequest = new DeleteRequest(entryType, String.valueOf(entry.getId()));
                post = client.delete(deleteRequest, RequestOptions.DEFAULT);
                break;
            default:
                throw new RuntimeException("Unknown index command: " + command);
            }
            int statusCode = post.status().getStatus();
            if (statusCode == HttpStatus.SC_OK || statusCode == HttpStatus.SC_CREATED) {
                LOGGER.info("Successful {}.", command);
            } else {
                LOGGER.error("Could not submit index to elastic search {}", post.status());
            }
        } catch (Exception e) {
            // FIX: log the full throwable so the stack trace is retained; the original
            // logged only e.getMessage(), which loses the cause entirely.
            LOGGER.error("Could not submit index to elastic search.", e);
        }
    }

    /**
     * Check if the entry is valid to perform the elastic operation
     *
     * @param entry   The entry to check
     * @param command The command that will be used
     * @return Whether or not the entry is valid
     */
    private boolean checkValid(Entry<?, ?> entry, StateManagerMode command) {
        boolean published = entry.getIsPublished();
        switch (command) {
        case PUBLISH:
        case UPDATE:
            if (published) {
                return true;
            }
            break;
        case DELETE:
            // Try deleting no matter what
            return true;
        default:
            LOGGER.error("Unrecognized Elasticsearch command.");
            return false;
        }
        return false;
    }

    /** Bulk-indexes entries, split by type into the tools and workflows indices. */
    @Override
    public void bulkUpsert(List<Entry> entries) {
        entries.forEach(this::eagerLoadEntry);
        entries = filterCheckerWorkflows(entries);
        // #2771 will need to disable this and properly create objects to get services into the index
        if (entries.isEmpty()) {
            return;
        }
        // sort entries into workflows and tools
        List<Entry> workflowsEntryList = entries.stream().filter(entry -> (entry instanceof BioWorkflow)).collect(Collectors.toList());
        List<Entry> toolsEntryList = entries.stream().filter(entry -> (entry instanceof Tool) || (entry instanceof AppTool)).collect(Collectors.toList());
        if (!workflowsEntryList.isEmpty()) {
            postBulkUpdate(WORKFLOWS_INDEX, workflowsEntryList);
        }
        if (!toolsEntryList.isEmpty()) {
            postBulkUpdate(TOOLS_INDEX, toolsEntryList);
        }
    }

    /** Submits one bulk index request for all given entries, waiting up to 5 minutes for completion. */
    private void postBulkUpdate(String index, List<Entry> entries) {
        BulkProcessor.Listener listener = new BulkProcessor.Listener() {
            @Override
            public void beforeBulk(long executionId, BulkRequest request) {
                int numberOfActions = request.numberOfActions();
                LOGGER.info("Executing bulk [{}] with {} requests", executionId, numberOfActions);
            }

            @Override
            public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
                if (response.hasFailures()) {
                    LOGGER.error("Bulk [{}] executed with failures", executionId);
                } else {
                    LOGGER.info("Bulk [{}] completed in {} milliseconds", executionId, response.getTook().getMillis());
                }
            }

            @Override
            public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
                LOGGER.error("Failed to execute bulk", failure);
            }
        };
        try {
            RestHighLevelClient client = ElasticSearchHelper.restHighLevelClient();
            BulkProcessor.Builder builder = BulkProcessor.builder(
                (request, bulkListener) -> client.bulkAsync(request, RequestOptions.DEFAULT, bulkListener), listener);
            // Set size of actions with `builder.setBulkSize()`, defaults to 5 MB
            BulkProcessor bulkProcessor = builder.build();
            entries.forEach(entry -> {
                try {
                    String s = MAPPER.writeValueAsString(dockstoreEntryToElasticSearchObject(entry));
                    bulkProcessor.add(new IndexRequest(index).id(String.valueOf(entry.getId())).source(s, XContentType.JSON));
                } catch (IOException e) {
                    LOGGER.error(MAPPER_ERROR, e);
                    throw new CustomWebApplicationException(MAPPER_ERROR, HttpStatus.SC_INTERNAL_SERVER_ERROR);
                }
            });
            try {
                // When doing a bulk index, this is the max amount of time the bulk listener should wait before considering the
                // bulk request as failed. 1 minute appears to be more than enough time to index all the current Dockstore entries.
                // However, 5 minutes is used instead (just in case)
                final long bulkProcessorWaitTimeInMinutes = 5L;
                boolean terminated = bulkProcessor.awaitClose(bulkProcessorWaitTimeInMinutes, TimeUnit.MINUTES);
                if (!terminated) {
                    LOGGER.error("Could not submit {} index to elastic search in time", index);
                    throw new CustomWebApplicationException("Could not submit " + index + " index to elastic search in time",
                        HttpStatus.SC_INTERNAL_SERVER_ERROR);
                }
            } catch (InterruptedException e) {
                LOGGER.error("Could not submit {} index to elastic search.", index, e);
                // NOTE(review): consider Thread.currentThread().interrupt() here to preserve the
                // interrupt status — confirm against the surrounding threading model.
                throw new CustomWebApplicationException("Could not submit " + index + " index to elastic search",
                    HttpStatus.SC_INTERNAL_SERVER_ERROR);
            }
        } catch (Exception e) {
            // FIX: parameterized logging; the throwable is already passed for the stack trace.
            LOGGER.error("Could not submit {} index to elastic search.", index, e);
            throw new CustomWebApplicationException("Could not submit " + index + " index to elastic search",
                HttpStatus.SC_INTERNAL_SERVER_ERROR);
        }
    }

    /**
     * This should be using an actual Elasticsearch object class instead of jsonNode
     *
     * @param entry The Dockstore entry
     * @return The Elasticsearch object string to be placed into the index
     * @throws IOException Mapper problems
     */
    public static JsonNode dockstoreEntryToElasticSearchObject(final Entry entry) throws IOException {
        Set<Version> workflowVersions = entry.getWorkflowVersions();
        boolean verified = workflowVersions.stream().anyMatch(Version::isVerified);
        Set<String> verifiedPlatforms = getVerifiedPlatforms(workflowVersions);
        Entry detachedEntry = removeIrrelevantProperties(entry);
        JsonNode jsonNode = MAPPER.readTree(MAPPER.writeValueAsString(detachedEntry));
        ((ObjectNode)jsonNode).put("verified", verified);
        ((ObjectNode)jsonNode).put("verified_platforms", MAPPER.valueToTree(verifiedPlatforms));
        addCategoriesJson(jsonNode, entry);
        return jsonNode;
    }

    /** Serializes the entry's categories (id/name/description/displayName/topic) into the index document. */
    private static void addCategoriesJson(JsonNode node, Entry<?, ?> entry) {
        List<Map<String, Object>> values = new ArrayList<>();
        for (Category category: entry.getCategories()) {
            Map<String, Object> value = new LinkedHashMap<>();
            value.put("id", category.getId());
            value.put("name", category.getName());
            value.put("description", category.getDescription());
            value.put("displayName", category.getDisplayName());
            value.put("topic", category.getTopic());
            values.add(value);
        }
        ((ObjectNode)node).put("categories", MAPPER.valueToTree(values));
    }

    /**
     * Remove some stuff that should not be indexed by ES.
     * This is not ideal, we should be including things we want indexed, not removing.
     * @param entry
     */
    private static Entry removeIrrelevantProperties(final Entry entry) {
        Entry detachedEntry;
        if (entry instanceof Tool) {
            Tool tool = (Tool) entry;
            Tool detachedTool = new Tool();
            tool.getWorkflowVersions().forEach(version -> {
                Hibernate.initialize(version.getSourceFiles());
            });
            // These are for facets
            detachedTool.setDescriptorType(tool.getDescriptorType());
            detachedTool.setDefaultWdlPath(tool.getDefaultWdlPath());
            detachedTool.setDefaultCwlPath(tool.getDefaultCwlPath());
            detachedTool.setNamespace(tool.getNamespace());
            detachedTool.setRegistry(tool.getRegistry());
            detachedTool.setPrivateAccess(tool.isPrivateAccess());
            // These are for table
            detachedTool.setGitUrl(tool.getGitUrl());
            detachedTool.setName(tool.getName());
            detachedTool.setToolname(tool.getToolname());
            // This is some weird hack to always use topicAutomatic for search table
            detachedTool.setTopicAutomatic(tool.getTopic());
            detachedEntry = detachedTool;
        } else if (entry instanceof BioWorkflow) {
            BioWorkflow bioWorkflow = (BioWorkflow) entry;
            BioWorkflow detachedBioWorkflow = new BioWorkflow();
            detachedEntry = detachWorkflow(detachedBioWorkflow, bioWorkflow);
        } else if (entry instanceof AppTool) {
            AppTool appTool = (AppTool) entry;
            AppTool detachedAppTool = new AppTool();
            detachedEntry = detachWorkflow(detachedAppTool, appTool);
        } else {
            return entry;
        }
        detachedEntry.setDescription(entry.getDescription());
        detachedEntry.setAuthor(entry.getAuthor());
        detachedEntry.setAliases(entry.getAliases());
        detachedEntry.setLabels((SortedSet<Label>)entry.getLabels());
        detachedEntry.setCheckerWorkflow(entry.getCheckerWorkflow());
        Set<Version> detachedVersions = cloneWorkflowVersion(entry.getWorkflowVersions());
        detachedEntry.setWorkflowVersions(detachedVersions);
        // This is some weird hack to always set the topic (which is either automatic or manual) into the ES topicAutomatic property for search table
        // This is to avoid indexing both topicAutomatic and topicManual and having the frontend choose which one to display
        detachedEntry.setTopicAutomatic(entry.getTopic());
        detachedEntry.setInputFileFormats(new TreeSet<>(entry.getInputFileFormats()));
        entry.getStarredUsers().forEach(user -> detachedEntry.addStarredUser((User)user));
        String defaultVersion = entry.getDefaultVersion();
        if (defaultVersion != null) {
            boolean saneDefaultVersion = detachedVersions.stream()
                .anyMatch(version -> defaultVersion.equals(version.getName()) || defaultVersion.equals(version.getReference()));
            if (saneDefaultVersion) {
                // If the tool/workflow has a default version, only keep the default version (and its sourcefile contents and description)
                Set<Version> newWorkflowVersions = detachedEntry.getWorkflowVersions();
                newWorkflowVersions.forEach(version -> {
                    if (!defaultVersion.equals(version.getReference()) && !defaultVersion.equals(version.getName())) {
                        version.setDescriptionAndDescriptionSource(null, null);
                        SortedSet<SourceFile> sourceFiles = version.getSourceFiles();
                        sourceFiles.forEach(sourceFile -> sourceFile.setContent(""));
                    }
                });
            } else {
                LOGGER.error("Entry has a default version that doesn't exist: {}", entry.getEntryPath());
            }
        }
        return detachedEntry;
    }

    /** Deep-copies versions (source files cloned via Gson round-trip) so the originals stay attached to Hibernate. */
    private static Set<Version> cloneWorkflowVersion(final Set<Version> originalWorkflowVersions) {
        Set<Version> detachedVersions = new HashSet<>();
        originalWorkflowVersions.forEach(workflowVersion -> {
            Version detatchedVersion = workflowVersion.createEmptyVersion();
            detatchedVersion.setDescriptionAndDescriptionSource(workflowVersion.getDescription(), workflowVersion.getDescriptionSource());
            detatchedVersion.setInputFileFormats(new TreeSet<>(workflowVersion.getInputFileFormats()));
            detatchedVersion.setOutputFileFormats(new TreeSet<>(workflowVersion.getOutputFileFormats()));
            detatchedVersion.setName(workflowVersion.getName());
            detatchedVersion.setReference(workflowVersion.getReference());
            SortedSet<SourceFile> sourceFiles = workflowVersion.getSourceFiles();
            sourceFiles.forEach(sourceFile -> {
                Gson gson = new Gson();
                String gsonString = gson.toJson(sourceFile);
                SourceFile detachedSourceFile = gson.fromJson(gsonString, SourceFile.class);
                detatchedVersion.addSourceFile(detachedSourceFile);
            });
            detatchedVersion.updateVerified();
            detachedVersions.add(detatchedVersion);
        });
        return detachedVersions;
    }

    /** Copies the workflow fields needed for ES facets and the search table onto a detached instance. */
    private static Workflow detachWorkflow(Workflow detachedWorkflow, Workflow workflow) {
        // These are for facets
        detachedWorkflow.setDescriptorType(workflow.getDescriptorType());
        detachedWorkflow.setSourceControl(workflow.getSourceControl());
        detachedWorkflow.setOrganization(workflow.getOrganization());
        // These are for table
        detachedWorkflow.setWorkflowName(workflow.getWorkflowName());
        detachedWorkflow.setRepository(workflow.getRepository());
        detachedWorkflow.setGitUrl(workflow.getGitUrl());
        return detachedWorkflow;
    }

    /** Collects the union of platform names under which any source file of any version was verified. */
    private static Set<String> getVerifiedPlatforms(Set<? extends Version> workflowVersions) {
        Set<String> platforms = new TreeSet<>();
        workflowVersions.forEach(workflowVersion -> {
            SortedSet<SourceFile> sourceFiles = workflowVersion.getSourceFiles();
            sourceFiles.forEach(sourceFile -> {
                Map<String, SourceFile.VerificationInformation> verifiedBySource = sourceFile.getVerifiedBySource();
                platforms.addAll(verifiedBySource.keySet());
            });
        });
        return platforms;
    }

    /**
     * If entry is a checker workflow, return null.  Otherwise, return entry
     * @param entry     The entry to check
     * @return  null if checker, entry otherwise
     */
    private static Entry filterCheckerWorkflows(Entry entry) {
        return entry instanceof Workflow && ((Workflow)entry).isIsChecker() ? null : entry;
    }

    /**
     * Remove checker workflow from list of entries
     * @param entries   List of all entries
     * @return  List of entries without checker workflows
     */
    public static List<Entry> filterCheckerWorkflows(List<Entry> entries) {
        return entries.stream()
            .filter(entry -> entry instanceof Tool || (entry instanceof Workflow && !((Workflow)entry).isIsChecker()))
            .collect(Collectors.toList());
    }
}
/*
 * Copyright LWJGL. All rights reserved.
 * License terms: https://www.lwjgl.org/license
 * MACHINE GENERATED FILE, DO NOT EDIT
 */
// NOTE(review): machine-generated by the LWJGL generator (see header). Do not hand-edit;
// changes belong in the OpenVR struct template so they survive regeneration.
package org.lwjgl.openvr;

import javax.annotation.*;

import java.nio.*;

import org.lwjgl.*;
import org.lwjgl.system.*;

import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;

/**
 * Holds the transform for a single bone.
 *
 * <h3>Layout</h3>
 *
 * <pre><code>
 * struct VRBoneTransform_t {
 *     {@link HmdVector4 HmdVector4_t} position;
 *     {@link HmdQuaternionf HmdQuaternionf_t} orientation;
 * }</code></pre>
 */
@NativeType("struct VRBoneTransform_t")
public class VRBoneTransform extends Struct implements NativeResource {

    /** The struct size in bytes. */
    public static final int SIZEOF;

    /** The struct alignment in bytes. */
    public static final int ALIGNOF;

    /** The struct member offsets. */
    public static final int
        POSITION,
        ORIENTATION;

    static {
        // Member offsets/size/alignment are computed once at class-load time
        // from the native layout of the two embedded structs.
        Layout layout = __struct(
            __member(HmdVector4.SIZEOF, HmdVector4.ALIGNOF),
            __member(HmdQuaternionf.SIZEOF, HmdQuaternionf.ALIGNOF)
        );

        SIZEOF = layout.getSize();
        ALIGNOF = layout.getAlignment();

        POSITION = layout.offsetof(0);
        ORIENTATION = layout.offsetof(1);
    }

    /**
     * Creates a {@code VRBoneTransform} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
     * visible to the struct instance and vice versa.
     *
     * <p>The created instance holds a strong reference to the container object.</p>
     */
    public VRBoneTransform(ByteBuffer container) {
        super(memAddress(container), __checkContainer(container, SIZEOF));
    }

    @Override
    public int sizeof() { return SIZEOF; }

    /** @return a {@link HmdVector4} view of the {@code position} field. */
    @NativeType("HmdVector4_t")
    public HmdVector4 position$() { return nposition$(address()); }
    /** @return a {@link HmdQuaternionf} view of the {@code orientation} field. */
    @NativeType("HmdQuaternionf_t")
    public HmdQuaternionf orientation() { return norientation(address()); }

    // -----------------------------------

    /** Returns a new {@code VRBoneTransform} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
    public static VRBoneTransform malloc() {
        return wrap(VRBoneTransform.class, nmemAllocChecked(SIZEOF));
    }

    /** Returns a new {@code VRBoneTransform} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
    public static VRBoneTransform calloc() {
        return wrap(VRBoneTransform.class, nmemCallocChecked(1, SIZEOF));
    }

    /** Returns a new {@code VRBoneTransform} instance allocated with {@link BufferUtils}. */
    public static VRBoneTransform create() {
        ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
        return wrap(VRBoneTransform.class, memAddress(container), container);
    }

    /** Returns a new {@code VRBoneTransform} instance for the specified memory address. */
    public static VRBoneTransform create(long address) {
        return wrap(VRBoneTransform.class, address);
    }

    /** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VRBoneTransform createSafe(long address) {
        return address == NULL ? null : wrap(VRBoneTransform.class, address);
    }

    /**
     * Returns a new {@link VRBoneTransform.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VRBoneTransform.Buffer malloc(int capacity) {
        return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity);
    }

    /**
     * Returns a new {@link VRBoneTransform.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VRBoneTransform.Buffer calloc(int capacity) {
        return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VRBoneTransform.Buffer} instance allocated with {@link BufferUtils}.
     *
     * @param capacity the buffer capacity
     */
    public static VRBoneTransform.Buffer create(int capacity) {
        ByteBuffer container = __create(capacity, SIZEOF);
        return wrap(Buffer.class, memAddress(container), capacity, container);
    }

    /**
     * Create a {@link VRBoneTransform.Buffer} instance at the specified memory.
     *
     * @param address  the memory address
     * @param capacity the buffer capacity
     */
    public static VRBoneTransform.Buffer create(long address, int capacity) {
        return wrap(Buffer.class, address, capacity);
    }

    /** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VRBoneTransform.Buffer createSafe(long address, int capacity) {
        return address == NULL ? null : wrap(Buffer.class, address, capacity);
    }

    // -----------------------------------

    /** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
    @Deprecated public static VRBoneTransform mallocStack() { return malloc(stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
    @Deprecated public static VRBoneTransform callocStack() { return calloc(stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
    @Deprecated public static VRBoneTransform mallocStack(MemoryStack stack) { return malloc(stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
    @Deprecated public static VRBoneTransform callocStack(MemoryStack stack) { return calloc(stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
    @Deprecated public static VRBoneTransform.Buffer mallocStack(int capacity) { return malloc(capacity, stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
    @Deprecated public static VRBoneTransform.Buffer callocStack(int capacity) { return calloc(capacity, stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
    @Deprecated public static VRBoneTransform.Buffer mallocStack(int capacity, MemoryStack stack) { return malloc(capacity, stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
    @Deprecated public static VRBoneTransform.Buffer callocStack(int capacity, MemoryStack stack) { return calloc(capacity, stack); }

    /**
     * Returns a new {@code VRBoneTransform} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack the stack from which to allocate
     */
    public static VRBoneTransform malloc(MemoryStack stack) {
        return wrap(VRBoneTransform.class, stack.nmalloc(ALIGNOF, SIZEOF));
    }

    /**
     * Returns a new {@code VRBoneTransform} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack the stack from which to allocate
     */
    public static VRBoneTransform calloc(MemoryStack stack) {
        return wrap(VRBoneTransform.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
    }

    /**
     * Returns a new {@link VRBoneTransform.Buffer} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VRBoneTransform.Buffer malloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VRBoneTransform.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VRBoneTransform.Buffer calloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
    }

    // -----------------------------------

    /** Unsafe version of {@link #position$}. */
    public static HmdVector4 nposition$(long struct) { return HmdVector4.create(struct + VRBoneTransform.POSITION); }
    /** Unsafe version of {@link #orientation}. */
    public static HmdQuaternionf norientation(long struct) { return HmdQuaternionf.create(struct + VRBoneTransform.ORIENTATION); }

    // -----------------------------------

    /** An array of {@link VRBoneTransform} structs. */
    public static class Buffer extends StructBuffer<VRBoneTransform, Buffer> implements NativeResource {

        private static final VRBoneTransform ELEMENT_FACTORY = VRBoneTransform.create(-1L);

        /**
         * Creates a new {@code VRBoneTransform.Buffer} instance backed by the specified container.
         *
         * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
         * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
         * by {@link VRBoneTransform#SIZEOF}, and its mark will be undefined.
         *
         * <p>The created buffer instance holds a strong reference to the container object.</p>
         */
        public Buffer(ByteBuffer container) {
            super(container, container.remaining() / SIZEOF);
        }

        public Buffer(long address, int cap) {
            super(address, null, -1, 0, cap, cap);
        }

        Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
            super(address, container, mark, pos, lim, cap);
        }

        @Override
        protected Buffer self() {
            return this;
        }

        @Override
        protected VRBoneTransform getElementFactory() {
            return ELEMENT_FACTORY;
        }

        /** @return a {@link HmdVector4} view of the {@code position} field. */
        @NativeType("HmdVector4_t")
        public HmdVector4 position$() { return VRBoneTransform.nposition$(address()); }
        /** @return a {@link HmdQuaternionf} view of the {@code orientation} field. */
        @NativeType("HmdQuaternionf_t")
        public HmdQuaternionf orientation() { return VRBoneTransform.norientation(address()); }

    }

}
/*
 * Copyright 1997-2015 Optimatika (www.optimatika.se)
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
package org.ojalgo.matrix.transformation;

import java.math.BigDecimal;

import org.ojalgo.access.Access1D;
import org.ojalgo.constant.BigMath;
import org.ojalgo.constant.PrimitiveMath;
import org.ojalgo.function.BigFunction;
import org.ojalgo.scalar.ComplexNumber;

/**
 * A Householder transformation vector: the three nested classes store the vector elements, the scalar
 * {@code beta} and the index {@code first} below which elements are treated as zero (see {@link #first()}).
 */
public interface Householder<N extends Number> extends Access1D<N> {

    /** {@link BigDecimal}-based implementation. */
    public static final class Big extends Object implements Householder<BigDecimal> {

        // Scalar factor of the transformation; copy(aSource) sets it to 2 / (v^T v).
        public BigDecimal beta;
        // Index of the first (potentially) non-zero vector element.
        public int first;
        public final BigDecimal[] vector;

        public Big(final Householder<BigDecimal> aTransf) {
            this((int) aTransf.count());
            this.copy(aTransf);
        }

        public Big(final int aDim) {
            super();
            vector = new BigDecimal[aDim];
            beta = BigMath.ZERO;
            first = 0;
        }

        @SuppressWarnings("unused")
        private Big() {
            this(0);
        }

        /**
         * Copies the source vector (from its {@code first()} index on) and recomputes
         * {@code beta = 2 / sum(v_i^2)} from the copied elements.
         */
        public final Householder.Big copy(final Householder<BigDecimal> aSource) {

            first = aSource.first();

            final BigDecimal[] tmpVector = vector;
            BigDecimal tmpVal, tmpVal2 = BigMath.ZERO;
            final int tmpSize = (int) aSource.count();
            for (int i = aSource.first(); i < tmpSize; i++) {
                tmpVal = aSource.get(i);
                tmpVal2 = BigFunction.ADD.invoke(tmpVal2, BigFunction.MULTIPLY.invoke(tmpVal, tmpVal));
                tmpVector[i] = tmpVal;
            }
            beta = BigFunction.DIVIDE.invoke(BigMath.TWO, tmpVal2);

            return this;
        }

        /** Copies the source vector but uses the supplied {@code beta} instead of recomputing it. */
        public final Householder.Big copy(final Householder<BigDecimal> aSource, final BigDecimal precalculatedBeta) {

            first = aSource.first();

            final BigDecimal[] tmpVector = vector;
            final int tmpSize = (int) aSource.count();
            for (int i = aSource.first(); i < tmpSize; i++) {
                tmpVector[i] = aSource.get(i);
            }
            beta = precalculatedBeta;

            return this;
        }

        public long count() {
            return vector.length;
        }

        public double doubleValue(final long anInd) {
            return vector[(int) anInd].doubleValue();
        }

        public int first() {
            return first;
        }

        public BigDecimal get(final int index) {
            return vector[index];
        }

        public BigDecimal get(final long index) {
            return vector[(int) index];
        }

        public int size() {
            return vector.length;
        }

        @Override
        public String toString() {

            final StringBuilder retVal = new StringBuilder("{");

            // Elements below 'first' are printed as zero, per the interface contract.
            final int tmpFirst = first;
            final int tmpLength = vector.length;
            for (int i = 0; i < tmpFirst; i++) {
                retVal.append(BigMath.ZERO);
                retVal.append(", ");
            }
            for (int i = first; i < tmpLength; i++) {
                retVal.append(vector[i]);
                if ((i + 1) < tmpLength) {
                    retVal.append(", ");
                }
            }
            retVal.append("}");

            return retVal.toString();
        }

    }

    /** {@link ComplexNumber}-based implementation. */
    public static final class Complex extends Object implements Householder<ComplexNumber> {

        // Scalar factor; copy(aSource) sets it to 2 / sum(|v_i|^2) (a real value).
        public ComplexNumber beta;
        public int first;
        public final ComplexNumber[] vector;

        public Complex(final Householder<ComplexNumber> aTransf) {
            this((int) aTransf.count());
            this.copy(aTransf);
        }

        public Complex(final int aDim) {
            super();
            vector = new ComplexNumber[aDim];
            beta = ComplexNumber.ZERO;
            first = 0;
        }

        @SuppressWarnings("unused")
        private Complex() {
            this(0);
        }

        /**
         * Copies the source vector and recomputes {@code beta = 2 / sum(|v_i|^2)} using the
         * element norms.
         */
        public final Householder.Complex copy(final Householder<ComplexNumber> aSource) {

            first = aSource.first();

            final ComplexNumber[] tmpVector = vector;
            ComplexNumber tmpNmbr;
            double tmpVal, tmpVal2 = PrimitiveMath.ZERO;
            final int tmpSize = (int) aSource.count();
            for (int i = aSource.first(); i < tmpSize; i++) {
                tmpNmbr = aSource.get(i);
                tmpVal = tmpNmbr.norm();
                tmpVal2 += tmpVal * tmpVal;
                tmpVector[i] = tmpNmbr;
            }
            beta = ComplexNumber.valueOf(PrimitiveMath.TWO / tmpVal2);

            return this;
        }

        /** Copies the source vector but uses the supplied {@code beta} instead of recomputing it. */
        public final Householder.Complex copy(final Householder<ComplexNumber> aSource, final ComplexNumber precalculatedBeta) {

            first = aSource.first();

            final ComplexNumber[] tmpVector = vector;
            final int tmpSize = (int) aSource.count();
            for (int i = aSource.first(); i < tmpSize; i++) {
                tmpVector[i] = aSource.get(i);
            }
            beta = precalculatedBeta;

            return this;
        }

        public long count() {
            return vector.length;
        }

        public double doubleValue(final long anInd) {
            return vector[(int) anInd].doubleValue();
        }

        public int first() {
            return first;
        }

        public ComplexNumber get(final int index) {
            return vector[index];
        }

        public ComplexNumber get(final long index) {
            return vector[(int) index];
        }

        public int size() {
            return vector.length;
        }

        @Override
        public String toString() {

            final StringBuilder retVal = new StringBuilder("{");

            // Elements below 'first' are printed as zero, per the interface contract.
            final int tmpFirst = first;
            final int tmpLength = vector.length;
            for (int i = 0; i < tmpFirst; i++) {
                retVal.append(ComplexNumber.ZERO);
                retVal.append(", ");
            }
            for (int i = first; i < tmpLength; i++) {
                retVal.append(vector[i]);
                if ((i + 1) < tmpLength) {
                    retVal.append(", ");
                }
            }
            retVal.append("}");

            return retVal.toString();
        }

    }

    /** Primitive {@code double}-based implementation. */
    public static final class Primitive extends Object implements Householder<Double> {

        // Scalar factor; copy(aSource) sets it to 2 / sum(v_i^2).
        public double beta;
        public int first;
        public final double[] vector;

        public Primitive(final Householder<Double> aTransf) {
            this((int) aTransf.count());
            this.copy(aTransf);
        }

        public Primitive(final int aDim) {
            super();
            vector = new double[aDim];
            beta = PrimitiveMath.ZERO;
            first = 0;
        }

        @SuppressWarnings("unused")
        private Primitive() {
            this(0);
        }

        /** Copies the source vector and recomputes {@code beta = 2 / sum(v_i^2)}. */
        public final Householder.Primitive copy(final Householder<Double> aSource) {

            first = aSource.first();

            final double[] tmpVector = vector;
            double tmpVal, tmpVal2 = PrimitiveMath.ZERO;
            final int tmpSize = (int) aSource.count();
            for (int i = aSource.first(); i < tmpSize; i++) {
                tmpVal = aSource.doubleValue(i);
                tmpVal2 += tmpVal * tmpVal;
                tmpVector[i] = tmpVal;
            }
            beta = PrimitiveMath.TWO / tmpVal2;

            return this;
        }

        /** Copies the source vector but uses the supplied {@code beta} instead of recomputing it. */
        public final Householder.Primitive copy(final Householder<Double> aSource, final double precalculatedBeta) {

            first = aSource.first();

            final double[] tmpVector = vector;
            final int tmpSize = (int) aSource.count();
            for (int i = aSource.first(); i < tmpSize; i++) {
                tmpVector[i] = aSource.doubleValue(i);
            }
            beta = precalculatedBeta;

            return this;
        }

        public long count() {
            return vector.length;
        }

        public double doubleValue(final long anInd) {
            return vector[(int) anInd];
        }

        public int first() {
            return first;
        }

        public Double get(final int index) {
            return vector[index];
        }

        public Double get(final long index) {
            return vector[(int) index];
        }

        public int size() {
            return vector.length;
        }

        @Override
        public String toString() {

            // NOTE(review): unlike Big/Complex, this version ignores 'first' (it prints the raw
            // stored values rather than zeros below 'first') and indexes vector.length - 1
            // directly, which throws on an empty vector — confirm whether this asymmetry is
            // intentional before changing it.
            final StringBuilder retVal = new StringBuilder("{ ");

            final int tmpLastIndex = vector.length - 1;
            for (int i = 0; i < tmpLastIndex; i++) {
                retVal.append(this.get(i));
                retVal.append(", ");
            }
            retVal.append(this.get(tmpLastIndex));

            retVal.append(" }");

            return retVal.toString();
        }

    }

    /**
     * Regardless of what is actually returned by {@linkplain #doubleValue(long)} and/or
     * {@linkplain #get(long)} vector elements with indeces less than 'first' should be assumed to be, and
     * treated as if they are, zero.
     */
    int first();

}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.airlift.compress.snappy;

import java.util.zip.Checksum;

/**
 * A pure-java implementation of the CRC32 checksum that uses
 * the CRC32-C polynomial, the same polynomial used by iSCSI
 * and implemented on many Intel chipsets supporting SSE4.2.
 * <p>
 * Not thread-safe: each instance holds mutable CRC state.
 */
// this code was taken from Apache Hadoop
class Crc32C implements Checksum {
    private static final int MASK_DELTA = 0xa282ead8;

    /**
     * The CRC32-C polynomial (0x1EDC6F41) in reversed bit order, as used by
     * right-shifting (LSB-first) CRC implementations.
     */
    private static final int CRC32C_POLYNOMIAL = 0x82F63B78;

    // Slicing-by-8 lookup tables. T8_0 is the classic byte-at-a-time table;
    // T8_k[b] is the CRC contribution of byte b followed by k zero bytes, so
    // update() can fold 8 input bytes per iteration.
    //
    // The tables are generated at class-load time from the polynomial instead
    // of embedding the 2048 hand-transcribed constants of the original Hadoop
    // source: the generated values are identical (byte-at-a-time recurrence
    // plus the standard T8_k[i] = (T8_{k-1}[i] >>> 8) ^ T8_0[T8_{k-1}[i] & 0xff]
    // composition) but auditable and impossible to mistype.
    static final int[] T8_0;
    static final int[] T8_1;
    static final int[] T8_2;
    static final int[] T8_3;
    static final int[] T8_4;
    static final int[] T8_5;
    static final int[] T8_6;
    static final int[] T8_7;

    static {
        int[][] tables = new int[8][256];
        // Base table: CRC of a single byte processed bit-by-bit, LSB first.
        for (int i = 0; i < 256; i++) {
            int crc = i;
            for (int j = 0; j < 8; j++) {
                crc = (crc >>> 1) ^ ((crc & 1) != 0 ? CRC32C_POLYNOMIAL : 0);
            }
            tables[0][i] = crc;
        }
        // Higher tables: effect of one extra trailing zero byte per level.
        for (int k = 1; k < 8; k++) {
            for (int i = 0; i < 256; i++) {
                int prev = tables[k - 1][i];
                tables[k][i] = (prev >>> 8) ^ tables[0][prev & 0xff];
            }
        }
        T8_0 = tables[0];
        T8_1 = tables[1];
        T8_2 = tables[2];
        T8_3 = tables[3];
        T8_4 = tables[4];
        T8_5 = tables[5];
        T8_6 = tables[6];
        T8_7 = tables[7];
    }

    public static int maskedCrc32c(byte[] data) {
        return maskedCrc32c(data, 0, data.length);
    }

    public static int maskedCrc32c(byte[] data, int offset, int length) {
        Crc32C crc32c = new Crc32C();
        crc32c.update(data, offset, length);
        return crc32c.getMaskedValue();
    }

    /**
     * Return a masked representation of crc.
     * <p/>
     * Motivation: it is problematic to compute the CRC of a string that
     * contains embedded CRCs. Therefore we recommend that CRCs stored
     * somewhere (e.g., in files) should be masked before being stored.
     */
    public static int mask(int crc) {
        // Rotate right by 15 bits and add a constant.
        return ((crc >>> 15) | (crc << 17)) + MASK_DELTA;
    }

    /**
     * Return the crc whose masked representation is masked_crc.
     */
    public static int unmask(int maskedCrc) {
        int rot = maskedCrc - MASK_DELTA;
        // Rotate left by 15 bits: exact inverse of mask() (before the add).
        return ((rot >>> 17) | (rot << 15));
    }

    /**
     * the current CRC value, bit-flipped
     */
    private int crc;

    /**
     * Create a new PureJavaCrc32 object.
     */
    public Crc32C() {
        reset();
    }

    public int getMaskedValue() {
        return mask(getIntValue());
    }

    public int getIntValue() {
        return ~crc; // undo the bit-flip of the internal representation
    }

    @Override
    public long getValue() {
        long ret = crc;
        return (~ret) & 0xffffffffL;
    }

    @Override
    public void reset() {
        crc = 0xffffffff; // standard CRC32 initial value (all ones)
    }

    @Override
    public void update(byte[] b, int off, int len) {
        int localCrc = crc;

        // Slicing-by-8 main loop: consume 8 bytes per iteration, combining
        // four table lookups for the CRC-overlapped bytes and four for the
        // following plain input bytes.
        while (len > 7) {
            int c0 = b[off++] ^ localCrc;
            localCrc >>>= 8;
            int c1 = b[off++] ^ localCrc;
            localCrc >>>= 8;
            int c2 = b[off++] ^ localCrc;
            localCrc >>>= 8;
            int c3 = b[off++] ^ localCrc;
            localCrc = (T8_7[c0 & 0xff] ^ T8_6[c1 & 0xff])
                    ^ (T8_5[c2 & 0xff] ^ T8_4[c3 & 0xff]);

            localCrc ^= (T8_3[b[off++] & 0xff] ^ T8_2[b[off++] & 0xff])
                    ^ (T8_1[b[off++] & 0xff] ^ T8_0[b[off++] & 0xff]);

            len -= 8;
        }

        // Byte-at-a-time tail for the remaining 0-7 bytes.
        while (len > 0) {
            localCrc = (localCrc >>> 8) ^ T8_0[(localCrc ^ b[off++]) & 0xff];
            len--;
        }

        // Publish crc out to object
        crc = localCrc;
    }

    @Override // implements Checksum.update(int); annotation was missing
    public void update(int b) {
        crc = (crc >>> 8) ^ T8_0[(crc ^ b) & 0xff];
    }
}
/***
 * ASM: a very small and fast Java bytecode manipulation framework
 * Copyright (c) 2000-2011 INRIA, France Telecom
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of the copyright holders nor the names of its
 *    contributors may be used to endorse or promote products derived from
 *    this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */
package nginx.clojure.asm.commons;

import java.util.HashMap;
import java.util.Map;

import nginx.clojure.asm.Type;

/**
 * A named method descriptor: an immutable (name, JVM descriptor) pair.
 *
 * @author Juozas Baliuka
 * @author Chris Nokleberg
 * @author Eric Bruneton
 */
public class Method {

    /**
     * The method name.
     */
    private final String name;

    /**
     * The method descriptor (JVM format, e.g. {@code "(ILjava/lang/String;)V"}).
     */
    private final String desc;

    /**
     * Maps primitive Java type names to their descriptors.
     */
    private static final Map<String, String> DESCRIPTORS;

    static {
        DESCRIPTORS = new HashMap<String, String>();
        DESCRIPTORS.put("void", "V");
        DESCRIPTORS.put("byte", "B");
        DESCRIPTORS.put("char", "C");
        DESCRIPTORS.put("double", "D");
        DESCRIPTORS.put("float", "F");
        DESCRIPTORS.put("int", "I");
        DESCRIPTORS.put("long", "J");
        DESCRIPTORS.put("short", "S");
        DESCRIPTORS.put("boolean", "Z");
    }

    /**
     * Creates a new {@link Method}.
     *
     * @param name
     *            the method's name.
     * @param desc
     *            the method's descriptor.
     */
    public Method(final String name, final String desc) {
        this.name = name;
        this.desc = desc;
    }

    /**
     * Creates a new {@link Method}.
     *
     * @param name
     *            the method's name.
     * @param returnType
     *            the method's return type.
     * @param argumentTypes
     *            the method's argument types.
     */
    public Method(final String name, final Type returnType,
            final Type[] argumentTypes) {
        this(name, Type.getMethodDescriptor(returnType, argumentTypes));
    }

    /**
     * Creates a new {@link Method}.
     *
     * @param m
     *            a java.lang.reflect method descriptor
     * @return a {@link Method} corresponding to the given Java method
     *         declaration.
     */
    public static Method getMethod(java.lang.reflect.Method m) {
        return new Method(m.getName(), Type.getMethodDescriptor(m));
    }

    /**
     * Creates a new {@link Method}.
     *
     * @param c
     *            a java.lang.reflect constructor descriptor
     * @return a {@link Method} corresponding to the given Java constructor
     *         declaration.
     */
    public static Method getMethod(java.lang.reflect.Constructor<?> c) {
        return new Method("<init>", Type.getConstructorDescriptor(c));
    }

    /**
     * Returns a {@link Method} corresponding to the given Java method
     * declaration.
     *
     * @param method
     *            a Java method declaration, without argument names, of the form
     *            "returnType name (argumentType1, ... argumentTypeN)", where
     *            the types are in plain Java (e.g. "int", "float",
     *            "java.util.List", ...). Classes of the java.lang package can
     *            be specified by their unqualified name; all other classes
     *            names must be fully qualified.
     * @return a {@link Method} corresponding to the given Java method
     *         declaration.
     * @throws IllegalArgumentException
     *             if <code>method</code> could not get parsed.
     */
    public static Method getMethod(final String method)
            throws IllegalArgumentException {
        return getMethod(method, false);
    }

    /**
     * Returns a {@link Method} corresponding to the given Java method
     * declaration.
     *
     * @param method
     *            a Java method declaration, without argument names, of the form
     *            "returnType name (argumentType1, ... argumentTypeN)", where
     *            the types are in plain Java (e.g. "int", "float",
     *            "java.util.List", ...). Classes of the java.lang package may
     *            be specified by their unqualified name, depending on the
     *            defaultPackage argument; all other classes names must be fully
     *            qualified.
     * @param defaultPackage
     *            true if unqualified class names belong to the default package,
     *            or false if they correspond to java.lang classes. For instance
     *            "Object" means "Object" if this option is true, or
     *            "java.lang.Object" otherwise.
     * @return a {@link Method} corresponding to the given Java method
     *         declaration.
     * @throws IllegalArgumentException
     *             if <code>method</code> could not get parsed.
     */
    public static Method getMethod(final String method,
            final boolean defaultPackage) throws IllegalArgumentException {
        int space = method.indexOf(' ');
        int start = method.indexOf('(', space) + 1;
        int end = method.indexOf(')', start);
        if (space == -1 || start == -1 || end == -1) {
            throw new IllegalArgumentException();
        }
        String returnType = method.substring(0, space);
        String methodName = method.substring(space + 1, start - 1).trim();
        // StringBuilder: purely local, single-threaded descriptor assembly
        // (no need for StringBuffer's synchronization).
        StringBuilder sb = new StringBuilder();
        sb.append('(');
        int p;
        do {
            String s;
            p = method.indexOf(',', start);
            if (p == -1) {
                // Last (or only) argument: runs up to the closing ')'.
                s = map(method.substring(start, end).trim(), defaultPackage);
            } else {
                s = map(method.substring(start, p).trim(), defaultPackage);
                start = p + 1;
            }
            sb.append(s);
        } while (p != -1);
        sb.append(')');
        sb.append(map(returnType, defaultPackage));
        return new Method(methodName, sb.toString());
    }

    /**
     * Converts a plain-Java type name (possibly with trailing "[]" pairs) into
     * its JVM descriptor. An empty string maps to itself, which makes a
     * zero-argument parameter list work out to "()".
     */
    private static String map(final String type, final boolean defaultPackage) {
        if ("".equals(type)) {
            return type;
        }

        StringBuilder sb = new StringBuilder();
        int index = 0;
        // One '[' per "[]" pair; sb holds only '[' chars at this point, so
        // its length is the array dimension count.
        while ((index = type.indexOf("[]", index) + 1) > 0) {
            sb.append('[');
        }

        String t = type.substring(0, type.length() - sb.length() * 2);
        String desc = DESCRIPTORS.get(t);
        if (desc != null) {
            sb.append(desc);
        } else {
            sb.append('L');
            if (t.indexOf('.') < 0) {
                // Unqualified name: default package or java.lang, per flag.
                if (!defaultPackage) {
                    sb.append("java/lang/");
                }
                sb.append(t);
            } else {
                sb.append(t.replace('.', '/'));
            }
            sb.append(';');
        }
        return sb.toString();
    }

    /**
     * Returns the name of the method described by this object.
     *
     * @return the name of the method described by this object.
     */
    public String getName() {
        return name;
    }

    /**
     * Returns the descriptor of the method described by this object.
     *
     * @return the descriptor of the method described by this object.
     */
    public String getDescriptor() {
        return desc;
    }

    /**
     * Returns the return type of the method described by this object.
     *
     * @return the return type of the method described by this object.
     */
    public Type getReturnType() {
        return Type.getReturnType(desc);
    }

    /**
     * Returns the argument types of the method described by this object.
     *
     * @return the argument types of the method described by this object.
     */
    public Type[] getArgumentTypes() {
        return Type.getArgumentTypes(desc);
    }

    @Override
    public String toString() {
        return name + desc;
    }

    @Override
    public boolean equals(final Object o) {
        if (!(o instanceof Method)) {
            return false;
        }
        Method other = (Method) o;
        return name.equals(other.name) && desc.equals(other.desc);
    }

    @Override
    public int hashCode() {
        return name.hashCode() ^ desc.hashCode();
    }
}
package com.wcohen.ss.expt;

import com.wcohen.ss.*;
import com.wcohen.ss.api.*;
import java.util.*;
import java.io.*;

/**
 * Holds data for evaluating a distance metric.
 *
 * <p>Data is organized as named "sources" (relations), each holding an
 * ordered list of {@link Instance} records. Sources are kept in the order
 * they are first seen.
 */
public class MatchData {
    // Maps source name -> ArrayList of Instance (raw types preserved from
    // the original pre-generics codebase).
    private Map sourceLists;
    // Source names in first-seen order; parallel index space to getSource(i).
    private ArrayList sourceNames;
    private String filename;
    // When the system property "ss.keepOldIterationBug" is set (to anything),
    // MatchIterator reproduces a historical off-by-one in source advancement
    // for backward compatibility; see MatchIterator.next().
    private static final boolean KEEP_OLD_ITERATION_BUG = System.getProperty("ss.keepOldIterationBug")!=null;

    /**
     * Read match data from a file. Format should be:
     * sourceRelation TAB instanceID TAB field1 TAB ... fieldn LF
     *
     * Fields beyond the third token are re-joined with TABs, so the instance
     * text preserves embedded tabs from the input line.
     *
     * @throws InputFormatException if a line lacks source, id, or text fields,
     *         or if an I/O error occurs (reported as line 0).
     */
    public MatchData(String filename) throws InputFormatException {
        this.filename = filename;
        sourceNames = new ArrayList();
        sourceLists = new HashMap();
        try {
            BufferedReader in = new BufferedReader(new FileReader(filename));
            String line;
            int lineNum = 0;
            while ((line = in.readLine())!=null) {
                lineNum++;
                // -1 limit keeps trailing empty tokens, so "src\tid\t" yields 3 tokens.
                String tok[] = line.split("\t",-1);
                int toklen = tok.length;
                if(toklen < 1) throw new InputFormatException(filename,lineNum,"no source");
                String src = tok[0];
                if (toklen < 2) throw new InputFormatException(filename,lineNum,"no id");
                String id = tok[1];
                if (toklen < 3) throw new InputFormatException(filename,lineNum,"no text fields");
                String text = tok[2];
                for(int i = 3;i < toklen;i++){
                    text += "\t" + tok[i];
                }
                addInstance(src,id,text);
            }
            in.close();
        } catch (IOException e) {
            throw new InputFormatException(filename,0,e.toString());
        }
    }

    /** Creates an empty data set with the placeholder filename "none". */
    public MatchData() {
        this.filename = "none";
        sourceNames = new ArrayList();
        sourceLists = new HashMap();
    }

    /** Add a single instance, with given src and id, to the datafile */
    public void addInstance(String src,String id,String text) {
        Instance inst = new Instance(src,id,text);
        ArrayList list = (ArrayList)sourceLists.get(src);
        if (list==null) {
            // First instance for this source: register the source name too.
            list = new ArrayList();
            sourceLists.put(src,list);
            sourceNames.add(src);
        }
        list.add(inst);
    }

    /** Number of sources in data set */
    public int numSources() {
        return sourceNames.size();
    }

    /** Get string identifier for i-th source */
    public String getSource(int i) {
        return (String)sourceNames.get(i);
    }

    /** Number of records for source with given string id */
    public int numInstances(String src) {
        // NOTE(review): throws NullPointerException for an unknown source
        // name — callers are expected to pass names from getSource().
        return ((ArrayList)sourceLists.get(src)).size();
    }

    /** Get the j-th record for the named source. */
    public Instance getInstance(String src, int j) {
        return (Instance)((ArrayList)sourceLists.get(src)).get(j);
    }

    /** Returns an iterator over every stored instance, source by source. */
    public StringWrapperIterator getIterator() {
        return new MatchIterator(this);
    }

    /** The file this data was loaded from, or "none". */
    public String getFilename() {
        return filename;
    }

    /** One instance per line, grouped by source, for debugging dumps. */
    public String toString() {
        StringBuffer buf = new StringBuffer();
        for (int i=0; i<numSources(); i++) {
            String src = getSource(i);
            for (int j=0; j<numInstances(src); j++) {
                Instance inst = getInstance(src,j);
                buf.append(inst.toString()+"\n");
            }
        }
        return buf.toString();
    }

    /** A single item (aka record, string, etc) to match against others.
     * An item has an id (for evaluating correctness of a match), a
     * source (which relation its from), and a text field.  Text is
     * stored as a StringWrapper so that it can be preprocessed, if
     * necessary.
     */
    public static class Instance extends BasicStringWrapper implements SourcedStringWrapper, IdentifiedStringWrapper {
        private final String source;
        private final String id;
        public Instance(String source, String id, String text) {
            super(text);
            // Source and id are trimmed once here so later equality checks
            // are whitespace-insensitive.
            this.source = source.trim();
            this.id = id.trim();
        }
        public String getSource() { return source; }
        public String getId() { return id; }
        /** True iff both ids are non-null and equal. */
        public boolean sameId(Instance b) {
            return id!=null && b.id!=null && id.equals(b.id);
        }
        public String toString() {
            return "[src: '"+source+"' id: '"+id+"' unwrapped: '"+unwrap()+"']";
        }
    }

    /** Iterates over all stored StringWrappers */
    static public class MatchIterator implements SourcedStringWrapperIterator {
        private int sourceCursor,instanceCursor;
        private String src; // caches getSource(sourceCursor)
        private MatchData data;
        public MatchIterator(MatchData data) {
            this.data = data;
            sourceCursor = 0;
            instanceCursor = 0;
            // NOTE(review): presumes at least one source exists — on an empty
            // MatchData this getSource(0) throws IndexOutOfBoundsException.
            src = data.getSource(sourceCursor);
        }
        /** Not implemented. */
        public void remove() {
            throw new IllegalStateException("remove not implemented");
        }
        /** Return the next StringWrapper. */
        public SourcedStringWrapper nextSourcedStringWrapper() {
            return (SourcedStringWrapper)next();
        }
        /** Return the next StringWrapper. */
        public StringWrapper nextStringWrapper() {
            return (StringWrapper)next();
        }
        public boolean hasNext() {
            return sourceCursor<data.numSources() && instanceCursor<data.numInstances(src);
        }
        /** Returns the next StringWrapper as an object.
         *
         * Two advancement modes, selected once at class-load time:
         * the legacy ("bug") mode only rolls to the next source when the
         * cursor goes strictly PAST the source size (> check), which skips
         * the roll one call late; the fixed mode rolls as soon as the last
         * instance of the current source has been returned (>= check).
         */
        public Object next() {
            Instance inst = data.getInstance( src, instanceCursor++ );
            if (KEEP_OLD_ITERATION_BUG && instanceCursor>data.numInstances(src)) {
                sourceCursor++;
                instanceCursor=0;
                if (sourceCursor<data.numSources()) src = data.getSource(sourceCursor);
            }
            if (!KEEP_OLD_ITERATION_BUG && instanceCursor>=data.numInstances(src)) {
                sourceCursor++;
                instanceCursor=0;
                if (sourceCursor<data.numSources()) src = data.getSource(sourceCursor);
            }
            return inst;
        }
    }

    /** Signals an incorrectly formatted MatchData file. */
    public static class InputFormatException extends RuntimeException {
        public InputFormatException(String file, int line, String msg) {
            super("line "+line+" of file "+file+": "+msg);
        }
    }

    /** Smoke test: load the file named by argv[0], dump it, then iterate it. */
    public static void main(String[] argv) {
        try {
            MatchData md = new MatchData(argv[0]);
            System.out.println("Dump:");
            System.out.println(md.toString());
            System.out.println();
            System.out.println("Iteration:");
            for (Iterator i = md.getIterator(); i.hasNext(); ) {
                System.out.println(i.next().toString());
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
// Copyright (c) 2003-2014, Jodd Team (jodd.org). All Rights Reserved. package jodd.util.sort; /** * <code>ComparableTimSort</code> from JDK7. * Changes: * <ul> * <li>reformatted</li> * <li>single sort method</li> * <li>no range check</li> * <li>asserts removed</li> * <li>comments removed</li> * </ul> * * @author Josh Bloch */ public class ComparableTimSort { private static final int MIN_MERGE = 32; private final Object[] a; private static final int MIN_GALLOP = 7; private int minGallop = MIN_GALLOP; private static final int INITIAL_TMP_STORAGE_LENGTH = 256; private Object[] tmp; private int stackSize = 0; // Number of pending runs on stack private final int[] runBase; private final int[] runLen; private ComparableTimSort(Object[] a) { this.a = a; // Allocate temp storage (which may be increased later if necessary) int len = a.length; @SuppressWarnings({"unchecked", "UnnecessaryLocalVariable"}) Object[] newArray = new Object[len < 2 * INITIAL_TMP_STORAGE_LENGTH ? len >>> 1 : INITIAL_TMP_STORAGE_LENGTH]; tmp = newArray; int stackLen = (len < 120 ? 5 : len < 1542 ? 10 : len < 119151 ? 19 : 40); runBase = new int[stackLen]; runLen = new int[stackLen]; } public static void sort(Object[] a) { int lo = 0; int hi = a.length; int nRemaining = hi - lo; if (nRemaining < 2) return; // Arrays of size 0 and 1 are always sorted // If array is small, do a "mini-TimSort" with no merges if (nRemaining < MIN_MERGE) { int initRunLen = countRunAndMakeAscending(a, lo, hi); binarySort(a, lo, hi, lo + initRunLen); return; } ComparableTimSort ts = new ComparableTimSort(a); int minRun = minRunLength(nRemaining); do { // Identify next run int runLen = countRunAndMakeAscending(a, lo, hi); // If run is short, extend to min(minRun, nRemaining) if (runLen < minRun) { int force = nRemaining <= minRun ? 
nRemaining : minRun; binarySort(a, lo, lo + force, lo + runLen); runLen = force; } // Push run onto pending-run stack, and maybe merge ts.pushRun(lo, runLen); ts.mergeCollapse(); // Advance to find next run lo += runLen; nRemaining -= runLen; } while (nRemaining != 0); // Merge all remaining runs to complete sort ts.mergeForceCollapse(); } @SuppressWarnings("fallthrough") private static void binarySort(Object[] a, int lo, int hi, int start) { if (start == lo) start++; for (; start < hi; start++) { @SuppressWarnings("unchecked") Comparable<Object> pivot = (Comparable) a[start]; // Set left (and right) to the index where a[start] (pivot) belongs int left = lo; int right = start; while (left < right) { int mid = (left + right) >>> 1; if (pivot.compareTo(a[mid]) < 0) right = mid; else left = mid + 1; } int n = start - left; // The number of elements to move // Switch is just an optimization for arraycopy in default case switch (n) { case 2: a[left + 2] = a[left + 1]; case 1: a[left + 1] = a[left]; break; default: System.arraycopy(a, left, a, left + 1, n); } a[left] = pivot; } } @SuppressWarnings("unchecked") private static int countRunAndMakeAscending(Object[] a, int lo, int hi) { int runHi = lo + 1; if (runHi == hi) return 1; // Find end of run, and reverse range if descending if (((Comparable) a[runHi++]).compareTo(a[lo]) < 0) { // Descending while (runHi < hi && ((Comparable) a[runHi]).compareTo(a[runHi - 1]) < 0) runHi++; reverseRange(a, lo, runHi); } else { // Ascending while (runHi < hi && ((Comparable) a[runHi]).compareTo(a[runHi - 1]) >= 0) runHi++; } return runHi - lo; } private static void reverseRange(Object[] a, int lo, int hi) { hi--; while (lo < hi) { Object t = a[lo]; a[lo++] = a[hi]; a[hi--] = t; } } private static int minRunLength(int n) { int r = 0; // Becomes 1 if any 1 bits are shifted off while (n >= MIN_MERGE) { r |= (n & 1); n >>= 1; } return n + r; } private void pushRun(int runBase, int runLen) { this.runBase[stackSize] = runBase; 
this.runLen[stackSize] = runLen; stackSize++; } private void mergeCollapse() { while (stackSize > 1) { int n = stackSize - 2; if (n > 0 && runLen[n - 1] <= runLen[n] + runLen[n + 1]) { if (runLen[n - 1] < runLen[n + 1]) n--; mergeAt(n); } else if (runLen[n] <= runLen[n + 1]) { mergeAt(n); } else { break; // Invariant is established } } } private void mergeForceCollapse() { while (stackSize > 1) { int n = stackSize - 2; if (n > 0 && runLen[n - 1] < runLen[n + 1]) n--; mergeAt(n); } } @SuppressWarnings("unchecked") private void mergeAt(int i) { int base1 = runBase[i]; int len1 = runLen[i]; int base2 = runBase[i + 1]; int len2 = runLen[i + 1]; runLen[i] = len1 + len2; if (i == stackSize - 3) { runBase[i + 1] = runBase[i + 2]; runLen[i + 1] = runLen[i + 2]; } stackSize--; int k = gallopRight((Comparable<Object>) a[base2], a, base1, len1, 0); base1 += k; len1 -= k; if (len1 == 0) return; len2 = gallopLeft((Comparable<Object>) a[base1 + len1 - 1], a, base2, len2, len2 - 1); if (len2 == 0) return; // Merge remaining runs, using tmp array with min(len1, len2) elements if (len1 <= len2) mergeLo(base1, len1, base2, len2); else mergeHi(base1, len1, base2, len2); } private static int gallopLeft(Comparable<Object> key, Object[] a, int base, int len, int hint) { int lastOfs = 0; int ofs = 1; if (key.compareTo(a[base + hint]) > 0) { // Gallop right until a[base+hint+lastOfs] < key <= a[base+hint+ofs] int maxOfs = len - hint; while (ofs < maxOfs && key.compareTo(a[base + hint + ofs]) > 0) { lastOfs = ofs; ofs = (ofs << 1) + 1; if (ofs <= 0) // int overflow ofs = maxOfs; } if (ofs > maxOfs) ofs = maxOfs; // Make offsets relative to base lastOfs += hint; ofs += hint; } else { // key <= a[base + hint] // Gallop left until a[base+hint-ofs] < key <= a[base+hint-lastOfs] final int maxOfs = hint + 1; while (ofs < maxOfs && key.compareTo(a[base + hint - ofs]) <= 0) { lastOfs = ofs; ofs = (ofs << 1) + 1; if (ofs <= 0) // int overflow ofs = maxOfs; } if (ofs > maxOfs) ofs = maxOfs; // Make 
offsets relative to base int tmp = lastOfs; lastOfs = hint - ofs; ofs = hint - tmp; } lastOfs++; while (lastOfs < ofs) { int m = lastOfs + ((ofs - lastOfs) >>> 1); if (key.compareTo(a[base + m]) > 0) lastOfs = m + 1; // a[base + m] < key else ofs = m; // key <= a[base + m] } return ofs; } private static int gallopRight(Comparable<Object> key, Object[] a, int base, int len, int hint) { int ofs = 1; int lastOfs = 0; if (key.compareTo(a[base + hint]) < 0) { // Gallop left until a[b+hint - ofs] <= key < a[b+hint - lastOfs] int maxOfs = hint + 1; while (ofs < maxOfs && key.compareTo(a[base + hint - ofs]) < 0) { lastOfs = ofs; ofs = (ofs << 1) + 1; if (ofs <= 0) // int overflow ofs = maxOfs; } if (ofs > maxOfs) ofs = maxOfs; // Make offsets relative to b int tmp = lastOfs; lastOfs = hint - ofs; ofs = hint - tmp; } else { // a[b + hint] <= key // Gallop right until a[b+hint + lastOfs] <= key < a[b+hint + ofs] int maxOfs = len - hint; while (ofs < maxOfs && key.compareTo(a[base + hint + ofs]) >= 0) { lastOfs = ofs; ofs = (ofs << 1) + 1; if (ofs <= 0) // int overflow ofs = maxOfs; } if (ofs > maxOfs) ofs = maxOfs; // Make offsets relative to b lastOfs += hint; ofs += hint; } lastOfs++; while (lastOfs < ofs) { int m = lastOfs + ((ofs - lastOfs) >>> 1); if (key.compareTo(a[base + m]) < 0) ofs = m; // key < a[b + m] else lastOfs = m + 1; // a[b + m] <= key } return ofs; } @SuppressWarnings("unchecked") private void mergeLo(int base1, int len1, int base2, int len2) { // Copy first run into temp array Object[] a = this.a; // For performance Object[] tmp = ensureCapacity(len1); System.arraycopy(a, base1, tmp, 0, len1); int cursor1 = 0; // Indexes into tmp array int cursor2 = base2; // Indexes int a int dest = base1; // Indexes int a // Move first element of second run and deal with degenerate cases a[dest++] = a[cursor2++]; if (--len2 == 0) { System.arraycopy(tmp, cursor1, a, dest, len1); return; } if (len1 == 1) { System.arraycopy(a, cursor2, a, dest, len2); a[dest + len2] = 
tmp[cursor1]; // Last elt of run 1 to end of merge return; } int minGallop = this.minGallop; // Use local variable for performance outer: while (true) { int count1 = 0; // Number of times in a row that first run won int count2 = 0; // Number of times in a row that second run won /* * Do the straightforward thing until (if ever) one run starts * winning consistently. */ do { if (((Comparable) a[cursor2]).compareTo(tmp[cursor1]) < 0) { a[dest++] = a[cursor2++]; count2++; count1 = 0; if (--len2 == 0) break outer; } else { a[dest++] = tmp[cursor1++]; count1++; count2 = 0; if (--len1 == 1) break outer; } } while ((count1 | count2) < minGallop); /* * One run is winning so consistently that galloping may be a * huge win. So try that, and continue galloping until (if ever) * neither run appears to be winning consistently anymore. */ do { count1 = gallopRight((Comparable) a[cursor2], tmp, cursor1, len1, 0); if (count1 != 0) { System.arraycopy(tmp, cursor1, a, dest, count1); dest += count1; cursor1 += count1; len1 -= count1; if (len1 <= 1) // len1 == 1 || len1 == 0 break outer; } a[dest++] = a[cursor2++]; if (--len2 == 0) break outer; count2 = gallopLeft((Comparable) tmp[cursor1], a, cursor2, len2, 0); if (count2 != 0) { System.arraycopy(a, cursor2, a, dest, count2); dest += count2; cursor2 += count2; len2 -= count2; if (len2 == 0) break outer; } a[dest++] = tmp[cursor1++]; if (--len1 == 1) break outer; minGallop--; } while (count1 >= MIN_GALLOP | count2 >= MIN_GALLOP); if (minGallop < 0) minGallop = 0; minGallop += 2; // Penalize for leaving gallop mode } // End of "outer" loop this.minGallop = minGallop < 1 ? 
1 : minGallop; // Write back to field if (len1 == 1) { System.arraycopy(a, cursor2, a, dest, len2); a[dest + len2] = tmp[cursor1]; // Last elt of run 1 to end of merge } else if (len1 == 0) { throw new IllegalArgumentException( "Comparison method violates its general contract!"); } else { System.arraycopy(tmp, cursor1, a, dest, len1); } } @SuppressWarnings("unchecked") private void mergeHi(int base1, int len1, int base2, int len2) { // Copy second run into temp array Object[] a = this.a; // For performance Object[] tmp = ensureCapacity(len2); System.arraycopy(a, base2, tmp, 0, len2); int cursor1 = base1 + len1 - 1; // Indexes into a int cursor2 = len2 - 1; // Indexes into tmp array int dest = base2 + len2 - 1; // Indexes into a // Move last element of first run and deal with degenerate cases a[dest--] = a[cursor1--]; if (--len1 == 0) { System.arraycopy(tmp, 0, a, dest - (len2 - 1), len2); return; } if (len2 == 1) { dest -= len1; cursor1 -= len1; System.arraycopy(a, cursor1 + 1, a, dest + 1, len1); a[dest] = tmp[cursor2]; return; } int minGallop = this.minGallop; // Use local variable for performance outer: while (true) { int count1 = 0; // Number of times in a row that first run won int count2 = 0; // Number of times in a row that second run won do { if (((Comparable) tmp[cursor2]).compareTo(a[cursor1]) < 0) { a[dest--] = a[cursor1--]; count1++; count2 = 0; if (--len1 == 0) break outer; } else { a[dest--] = tmp[cursor2--]; count2++; count1 = 0; if (--len2 == 1) break outer; } } while ((count1 | count2) < minGallop); do { count1 = len1 - gallopRight((Comparable) tmp[cursor2], a, base1, len1, len1 - 1); if (count1 != 0) { dest -= count1; cursor1 -= count1; len1 -= count1; System.arraycopy(a, cursor1 + 1, a, dest + 1, count1); if (len1 == 0) break outer; } a[dest--] = tmp[cursor2--]; if (--len2 == 1) break outer; count2 = len2 - gallopLeft((Comparable) a[cursor1], tmp, 0, len2, len2 - 1); if (count2 != 0) { dest -= count2; cursor2 -= count2; len2 -= count2; 
System.arraycopy(tmp, cursor2 + 1, a, dest + 1, count2); if (len2 <= 1) break outer; // len2 == 1 || len2 == 0 } a[dest--] = a[cursor1--]; if (--len1 == 0) break outer; minGallop--; } while (count1 >= MIN_GALLOP | count2 >= MIN_GALLOP); if (minGallop < 0) minGallop = 0; minGallop += 2; // Penalize for leaving gallop mode } // End of "outer" loop this.minGallop = minGallop < 1 ? 1 : minGallop; // Write back to field if (len2 == 1) { dest -= len1; cursor1 -= len1; System.arraycopy(a, cursor1 + 1, a, dest + 1, len1); a[dest] = tmp[cursor2]; // Move first elt of run2 to front of merge } else if (len2 == 0) { throw new IllegalArgumentException( "Comparison method violates its general contract!"); } else { System.arraycopy(tmp, 0, a, dest - (len2 - 1), len2); } } private Object[] ensureCapacity(int minCapacity) { if (tmp.length < minCapacity) { // Compute smallest power of 2 > minCapacity int newSize = minCapacity; newSize |= newSize >> 1; newSize |= newSize >> 2; newSize |= newSize >> 4; newSize |= newSize >> 8; newSize |= newSize >> 16; newSize++; if (newSize < 0) // Not bloody likely! newSize = minCapacity; else newSize = Math.min(newSize, a.length >>> 1); @SuppressWarnings({"unchecked", "UnnecessaryLocalVariable"}) Object[] newArray = new Object[newSize]; tmp = newArray; } return tmp; } }
package com.isawabird.parse; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.util.EntityUtils; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import android.accounts.Account; import android.content.AbstractThreadedSyncAdapter; import android.content.ContentProviderClient; import android.content.Context; import android.content.SyncResult; import android.os.Bundle; import android.util.Log; import com.isawabird.BirdList; import com.isawabird.Consts; import com.isawabird.Sighting; import com.isawabird.Utils; import com.isawabird.db.DBConsts; import com.isawabird.db.DBHandler; import com.parse.Parse; public class ParseSyncAdapter extends AbstractThreadedSyncAdapter { private DBHandler dh; private JSONArray requestArray = new JSONArray(); private ArrayList<Long> postEntries = new ArrayList<Long>(); private static final int MAX_REQUESTS_PER_MONTH = 1000000; private static final int NUM_ACTIVE_USERS = 5000; private static final float QUOTA_PER_MONTH = MAX_REQUESTS_PER_MONTH / NUM_ACTIVE_USERS; // 200 public ParseSyncAdapter(Context context, boolean autoInitialize) { super(context, autoInitialize); dh = DBHandler.getInstance(context); } /* * We have 1,000,000 requests per month available for free from Parse. * Assuming 5000 active users a month, it leaves 200 /user/month = 6.666 / * day. Use this method to throttle the number of requests we send to Parse. 
*/ private boolean areSyncCreditsAvailable() { Calendar lastSyncDate = Calendar.getInstance(); /* We can't use Utils.prefs because sync happens independent of the app */ // MainActivity.getContext().getSharedPreferences("MyPrefs",Context.MODE_PRIVATE); lastSyncDate.setTimeInMillis(Utils.getLastSyncDate()); if (lastSyncDate.get(Calendar.MONTH) != Calendar.getInstance().get(Calendar.MONTH) || lastSyncDate.get(Calendar.YEAR) != Calendar.getInstance().get(Calendar.YEAR)) { /* * We are syncing for the first time this month. Reset the request * count and return true */ Utils.resetNumberRequestsThisMonth(); return true; } int date = Calendar.getInstance().get(Calendar.DATE); int requestsSpentThisMonth = Utils.getNumberOfRequestsThisMonth(); float quotaPerDay = QUOTA_PER_MONTH / Calendar.getInstance().getActualMaximum(Calendar.DATE); float availableRequests = (date * quotaPerDay) - requestsSpentThisMonth; Log.i(Consts.TAG, " We have " + availableRequests + " requests remaining this month"); return (availableRequests > 0); } private void syncBirdLists() { try { // get bird list to sync create/update/delete ArrayList<BirdList> birdListToSync = dh.getBirdListToSync(); ArrayList<Long> staleEntries = new ArrayList<Long>(); JSONObject body = null; for (BirdList birdList : birdListToSync) { ////Log.i(Consts.TAG, "Adding to postEntries " + birdList.getId()); if (birdList.isMarkedForDelete()) { // DELETE if (birdList.getParseObjectID() == null) { // exclude DELETE since object is not created at server // yet staleEntries.add(birdList.getId()); } else { if (requestArray.length() < 49 ) { /* Parse accepts only 50 requests in a batch */ // include DELETE postEntries.add(birdList.getId()); addDeleteRequest(birdList.getParseObjectID(), DBConsts.TABLE_LIST); } } } else { // if not delete, then it is marked for upload body = new JSONObject(); body.put(DBConsts.LIST_NAME, birdList.getListName()); body.put(DBConsts.LIST_USER, birdList.getUsername()); body.put(DBConsts.LIST_NOTES, 
birdList.getNotes()); body.put(DBConsts.LIST_DATE, getDateInParseFormat(birdList.getDate())); if (birdList.getParseObjectID() == null) { if (requestArray.length() < 49 ) { /* Parse accepts only 50 requests in a batch */ // CREATE postEntries.add(birdList.getId()); addCreateRequest(DBConsts.TABLE_LIST, body); } } else { if (requestArray.length() < 49 ) { /* Parse accepts only 50 requests in a batch */ // UPDATE postEntries.add(birdList.getId()); addUpdateRequest(birdList.getParseObjectID(), DBConsts.TABLE_LIST, body); } } } } // delete staleEntries from db for (Long id : staleEntries) { dh.deleteLocally(DBConsts.TABLE_LIST, id); } } catch (JSONException ex) { Log.e(Consts.TAG, ex.getMessage()); ex.printStackTrace(); } } private void syncSightings() { try { // get bird list to sync create/update/delete ArrayList<Sighting> sightingsToSync = dh.getSightingsToSync(); ArrayList<Long> staleEntries = new ArrayList<Long>(); JSONObject body = null; for (Sighting sighting : sightingsToSync) { if (sighting.isMarkedForDelete()) { // DELETE if (sighting.getParseObjectID() == null) { // exclude DELETE since object is not created at server // yet staleEntries.add(sighting.getId()); } else { if (requestArray.length() < 49 ) { /* Parse accepts only 50 requests in a batch */ // include DELETE postEntries.add(sighting.getId()); addDeleteRequest(sighting.getParseObjectID(), DBConsts.TABLE_SIGHTING); } } } else { // if not delete, then it is marked for upload body = new JSONObject(); body.put(DBConsts.SIGHTING_SPECIES, sighting.getSpecies().getFullName()); body.put(DBConsts.SIGHTING_NOTES, sighting.getNotes()); body.put(DBConsts.SIGHTING_DATE, getDateInParseFormat(sighting.getDate())); body.put(DBConsts.SIGHTING_LATITUDE, sighting.getLatitude()); body.put(DBConsts.SIGHTING_LONGITUDE, sighting.getLongitude()); body.put(DBConsts.SIGHTING_IS_HEARD_ONLY, sighting.isHeardOnly()); if(sighting.getListParseObjectId() != null && !sighting.getListParseObjectId().isEmpty()) { 
body.put(DBConsts.SIGHTING_LIST_ID, sighting.getListParseObjectId()); } else { Log.wtf(Consts.TAG, "Shouldn't be here. listParseID: " + sighting.getListParseObjectId()); } if (sighting.getParseObjectID() == null) { if (requestArray.length() < 49 ) { /* Parse accepts only 50 requests in a batch */ // CREATE postEntries.add(sighting.getId()); addCreateRequest(DBConsts.TABLE_SIGHTING, body); } } else { if (requestArray.length() < 49 ) { /* Parse accepts only 50 requests in a batch */ // UPDATE postEntries.add(sighting.getId()); addUpdateRequest(sighting.getParseObjectID(), DBConsts.TABLE_SIGHTING, body); } } } } // delete staleEntries from db for (Long id : staleEntries) { dh.deleteLocally(DBConsts.TABLE_SIGHTING, id); } //TODO: delete sightings that are marked for delete whose parent listParseObjectId is null } catch (JSONException ex) { Log.e(Consts.TAG, ex.getMessage()); ex.printStackTrace(); } } private void syncFeedback() { try { // get bird list to sync create/update/delete JSONArray feedbackToSync = dh.getFeedbackToSync(); JSONObject body = null; for (int i = 0 ; i < feedbackToSync.length() ; i ++) { // if not delete, then it is marked for upload body = new JSONObject(); body.put(DBConsts.FEEDBACK_USER, ParseUtils.getCurrentUsername()); body.put(DBConsts.FEEDBACK_DATE, getDateInParseFormat(new Date())); body.put(DBConsts.FEEDBACK_TEXT , feedbackToSync.getJSONObject(i).getString("feedbackText")); // TODO Externalize if (requestArray.length() < 49 ) { /* Parse accepts only 50 requests in a batch */ addCreateRequest(DBConsts.TABLE_FEEDBACK, body); postEntries.add((long)feedbackToSync.getJSONObject(i).getInt("feedbackId")); // TODO Externalize } } } catch (JSONException ex) { Log.e(Consts.TAG, ex.getMessage()); ex.printStackTrace(); } } private JSONObject getDateInParseFormat(Date date){ JSONObject dateObj = new JSONObject(); try{ SimpleDateFormat dateformat = new SimpleDateFormat( "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); dateObj.put("__type", "Date"); dateObj.put("iso", 
dateformat.format(date)); }catch(JSONException ex){ return null; } return dateObj; } @Override public void onPerformSync(Account account, Bundle extras, String authority, ContentProviderClient provider, SyncResult syncResult) { try { Utils.initializePrefs(getContext()); if (Utils.isNetworkAvailable(getContext()) && (areSyncCreditsAvailable() || extras.getBoolean(Consts.OVERRIDE_THROTTLE))) { Parse.initialize(getContext(), ParseConsts.APP_ID, ParseConsts.REST_CLIENT_KEY); syncBirdLists(); syncSightings(); syncFeedback(); if (requestArray.length() > 0) { JSONObject batchRequest = buildRequest(requestArray); JSONArray respArray = postRequest(batchRequest); /* Parse the response */ if (respArray != null) { for (int i = 0; i < respArray.length(); i++) { JSONObject reponseObject = respArray.getJSONObject(i); if (reponseObject.has(ParseConsts.SUCCESS)) { JSONObject requestObj = requestArray.getJSONObject(i); String method = requestObj.getString("method"); String table = requestObj.getString("className"); // update parseObjectId for POST requests if (method == "POST") { // We added a new entry to Parse String objID = reponseObject.getJSONObject(ParseConsts.SUCCESS).getString(ParseConsts.OBJECTID); dh.updateParseObjectID(table, postEntries.get(i), objID); } else if (method == "PUT") { // We Updated Parse. So, just reset the // upload required flag. 
dh.resetUploadRequiredFlag(table, postEntries.get(i)); } else if (method == "DELETE") { // delete invalid rows for DELETE requests dh.deleteLocally(table, postEntries.get(i)); } // TODO Remove later // dh.dumpTable(table); } else { // TODO : Handle failure } } } } requestArray = new JSONArray(); postEntries = new ArrayList<Long>(); } } catch (Exception e) { String err; if (e.getMessage()==null){ err = "Sync Failed"; Log.e(Consts.TAG, err); }else { err = e.getMessage(); Log.e(Consts.TAG, err); e.printStackTrace(); } } } public JSONArray postRequest(JSONObject batchRequest) { try { if (batchRequest == null) return null; HttpClient client = new DefaultHttpClient(); HttpPost postReq = new HttpPost(ParseConsts.BATCH_URL); //Log.i(Consts.TAG, "Sending request..."); postReq.addHeader("X-Parse-Application-Id", ParseConsts.APP_ID); postReq.addHeader("X-Parse-REST-API-Key", ParseConsts.REST_CLIENT_KEY); postReq.addHeader("Content-Type", "application/json"); //Log.i(Consts.TAG, "Request to be sent : " + batchRequest.toString()); StringEntity entity = new StringEntity(batchRequest.toString()); postReq.setEntity(entity); HttpResponse resp = client.execute(postReq); HttpEntity respEntity = resp.getEntity(); String response = EntityUtils.toString(respEntity); //Log.i(Consts.TAG, "Response is " + response); Utils.incrementNumberRequestsThisMonth(); //Log.i(Consts.TAG, "Number of requests so far this month " +Utils.getNumberOfRequestsThisMonth()); return new JSONArray(response); } catch (Exception e) { Log.e(Consts.TAG, e.getMessage()); e.printStackTrace(); } return null; } public JSONObject buildRequest(JSONArray requestArray) { try { JSONObject request = new JSONObject(); request.put("requests", requestArray); return request; } catch (JSONException e) { Log.e(Consts.TAG, "error in buildRequest: " + e.getMessage()); } return null; } public void addCreateRequest(String objectName, JSONObject body) { //Log.i(Consts.TAG, " >> addCreateRequest for " + body.toString()); JSONObject 
createRequest = new JSONObject(); try { createRequest.put("method", "POST"); createRequest.put("path", "/1/classes/" + objectName); createRequest.put("body", body); createRequest.put("className", objectName); } catch (JSONException e) { Log.e(Consts.TAG, "error in addCreateRequest: " + e.getMessage()); return; } requestArray.put(createRequest); } public void addUpdateRequest(String objectId, String objectName, JSONObject body) { JSONObject updateRequest = new JSONObject(); try { updateRequest.put("method", "PUT"); updateRequest.put("path", "/1/classes/" + objectName + "/" + objectId); updateRequest.put("body", body); updateRequest.put("className", objectName); } catch (JSONException e) { Log.e(Consts.TAG, "error in addUpdateRequest: " + e.getMessage()); return; } requestArray.put(updateRequest); } public void addDeleteRequest(String objectId, String objectName) { JSONObject deleteRequest = new JSONObject(); try { deleteRequest.put("method", "DELETE"); deleteRequest.put("path", "/1/classes/" + objectName + "/" + objectId); deleteRequest.put("className", objectName); } catch (JSONException e) { Log.e(Consts.TAG, "error in addDeleteRequest: " + e.getMessage()); return; } requestArray.put(deleteRequest); } }
/*
 * Copyright 2014 http://Bither.net
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.bither.activity.cold;

import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.TextView;

import net.bither.BitherSetting;
import net.bither.R;
import net.bither.bitherj.core.Address;
import net.bither.bitherj.core.AddressManager;
import net.bither.bitherj.crypto.ECKey;
import net.bither.bitherj.crypto.SecureCharSequence;
import net.bither.bitherj.crypto.hd.DeterministicKey;
import net.bither.bitherj.qrcode.QRCodeTxTransport;
import net.bither.bitherj.qrcode.QRCodeUtil;
import net.bither.bitherj.utils.Utils;
import net.bither.preference.AppSharedPreference;
import net.bither.qrcode.BitherQRCodeActivity;
import net.bither.qrcode.ScanActivity;
import net.bither.qrcode.ScanQRCodeTransportActivity;
import net.bither.ui.base.DropdownMessage;
import net.bither.ui.base.SwipeRightActivity;
import net.bither.ui.base.dialog.DialogPassword;
import net.bither.ui.base.dialog.DialogProgress;
import net.bither.ui.base.listener.IBackClickListener;
import net.bither.ui.base.listener.IDialogPasswordListener;
import net.bither.util.UnitUtilWrapper;
import net.bither.util.WalletUtils;

import java.util.ArrayList;
import java.util.List;

/**
 * Cold-wallet screen that signs an unsigned transaction received via QR code.
 * Flow: immediately launch a QR scan, decode the scanned payload into a
 * {@link QRCodeTxTransport}, show the transaction details for review, then —
 * after the user confirms their password — sign the transaction hashes on a
 * background thread and present the signatures as a new QR code.
 */
public class SignTxActivity extends SwipeRightActivity implements
        IDialogPasswordListener {
    // Transaction-detail labels populated from the scanned transport.
    private TextView tvFrom;
    private TextView tvTo;
    private TextView tvAmount;
    private TextView tvFee;
    private TextView tvSymbol;
    private TextView tvFeeSymbol;
    // Change-output row; hidden when the transaction has no change address.
    private View llChange;
    private TextView tvAddressChange;
    private TextView tvAmountChange;
    private TextView tvSymbolChange;
    private Button btnSign;
    // Shown when neither a matching private key nor an HDM keychain is available.
    private TextView tvCannotFindPrivateKey;
    // Decoded unsigned-transaction payload from the scanned QR code.
    private QRCodeTxTransport qrCodeTransport;
    // Progress dialog displayed while signing runs on the background thread.
    private DialogProgress dp;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_sign_tx);
        // Kick off the QR scan right away; the result arrives in onActivityResult.
        toScanActivity();
        initView();
    }

    /**
     * Binds the layout views and wires up listeners. The sign button stays
     * disabled until a transaction has been scanned and validated.
     */
    private void initView() {
        findViewById(R.id.ibtn_cancel).setOnClickListener(
                new IBackClickListener(0, R.anim.slide_out_right));
        tvFrom = (TextView) findViewById(R.id.tv_address_from);
        tvTo = (TextView) findViewById(R.id.tv_address_to);
        tvAmount = (TextView) findViewById(R.id.tv_amount);
        tvFee = (TextView) findViewById(R.id.tv_fee);
        llChange = findViewById(R.id.ll_change);
        tvAddressChange = (TextView) findViewById(R.id.tv_address_change);
        tvAmountChange = (TextView) findViewById(R.id.tv_amount_change);
        tvSymbolChange = (TextView) findViewById(R.id.tv_symbol_change);
        btnSign = (Button) findViewById(R.id.btn_sign);
        tvCannotFindPrivateKey = (TextView) findViewById(R.id.tv_can_not_find_private_key);
        tvSymbol = (TextView) findViewById(R.id.tv_symbol);
        tvFeeSymbol = (TextView) findViewById(R.id.tv_fee_symbol);
        btnSign.setEnabled(false);
        btnSign.setOnClickListener(signClick);
        dp = new DialogProgress(this, R.string.signing_transaction);
    }

    /**
     * Receives the QR-scan result. A successful scan is decoded into
     * {@link #qrCodeTransport} and displayed; any failure (cancelled scan or
     * undecodable payload) closes this activity.
     */
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (requestCode == BitherSetting.INTENT_REF.SCAN_REQUEST_CODE
                && resultCode == Activity.RESULT_OK) {
            String str = data.getExtras().getString(
                    ScanActivity.INTENT_EXTRA_RESULT);
            qrCodeTransport = QRCodeTxTransport.formatQRCodeTransport(str);
            if (qrCodeTransport != null) {
                showTransaction();
            } else {
                // Payload could not be decoded into a transaction transport.
                super.finish();
            }
        } else {
            // Scan cancelled or failed; nothing to sign.
            super.finish();
            return;
        }
        super.onActivityResult(requestCode, resultCode, data);
    }

    /**
     * Renders the scanned transaction (addresses, amounts, fee, optional
     * change output) and enables the sign button only when this cold wallet
     * holds the key material needed to sign it: either the private key for
     * the source address, or — for HDM transactions — an HDM keychain.
     */
    private void showTransaction() {
        String symbol = AppSharedPreference.getInstance().getBitcoinUnit().name();
        tvSymbol.setText(symbol);
        tvFeeSymbol.setText(symbol);
        tvSymbolChange.setText(symbol);
        tvFrom.setText(WalletUtils.formatHash(qrCodeTransport.getMyAddress(), 4,
                qrCodeTransport.getMyAddress().length()));
        tvTo.setText(WalletUtils.formatHash(qrCodeTransport.getToAddress(), 4,
                qrCodeTransport.getToAddress().length()));
        tvAmount.setText(UnitUtilWrapper.formatValueWithBold(qrCodeTransport.getTo()));
        tvFee.setText(UnitUtilWrapper.formatValueWithBold(qrCodeTransport.getFee()));
        llChange.setVisibility(View.GONE);
        if (!Utils.isEmpty(qrCodeTransport.getChangeAddress())) {
            // Transaction carries an explicit change output; show its row.
            llChange.setVisibility(View.VISIBLE);
            tvAddressChange.setText(WalletUtils.formatHash(qrCodeTransport.getChangeAddress(), 4,
                    qrCodeTransport.getChangeAddress().length()));
            tvAmountChange.setText(UnitUtilWrapper.formatValueWithBold(qrCodeTransport.getChangeAmt()));
        }
        Address address = WalletUtils
                .findPrivateKey(qrCodeTransport.getMyAddress());
        // HdmIndex >= 0 marks an HDM transaction, which needs the HDM keychain
        // instead of a plain private key.
        if ((qrCodeTransport.getHdmIndex() < 0 && address == null) || (qrCodeTransport
                .getHdmIndex() >= 0 && !AddressManager.getInstance().hasHDMKeychain())) {
            btnSign.setEnabled(false);
            tvCannotFindPrivateKey.setVisibility(View.VISIBLE);
        } else {
            btnSign.setEnabled(true);
            tvCannotFindPrivateKey.setVisibility(View.GONE);
        }
    }

    // Prompts for the wallet password; signing starts in onPasswordEntered.
    private OnClickListener signClick = new OnClickListener() {
        @Override
        public void onClick(View v) {
            DialogPassword dialogPassword = new DialogPassword(
                    SignTxActivity.this, SignTxActivity.this);
            dialogPassword.show();
        }
    };

    /**
     * Signs the transaction hashes on a background thread once the password
     * dialog succeeds. HDM transactions are signed with the external HDM key
     * at the transport's index; ordinary transactions with the address's
     * private key. The password (and HDM key) are wiped on every exit path.
     * Signatures are hex-encoded, joined with {@link QRCodeUtil#QR_CODE_SPLIT},
     * and handed to {@link BitherQRCodeActivity} for display.
     */
    @Override
    public void onPasswordEntered(final SecureCharSequence password) {
        Thread thread = new Thread() {
            public void run() {
                List<String> strings = null;
                if (qrCodeTransport.getHdmIndex() >= 0) {
                    if (!AddressManager.getInstance().hasHDMKeychain()) {
                        // Cannot sign an HDM tx without the keychain: dismiss
                        // the progress dialog and notify the user.
                        dp.setThread(null);
                        runOnUiThread(new Runnable() {
                            @Override
                            public void run() {
                                dp.dismiss();
                                DropdownMessage.showDropdownMessage(SignTxActivity.this,
                                        R.string.hdm_send_with_cold_no_requested_seed);
                            }
                        });
                        password.wipe();
                        return;
                    }
                    try {
                        DeterministicKey key = AddressManager.getInstance().getHdmKeychain()
                                .getExternalKey(qrCodeTransport.getHdmIndex(), password);
                        List<String> hashes = qrCodeTransport.getHashList();
                        strings = new ArrayList<String>();
                        // Sign each input hash and hex-encode the DER signature.
                        for (String hash : hashes) {
                            ECKey.ECDSASignature signed = key.sign(Utils.hexStringToByteArray
                                    (hash));
                            strings.add(Utils.bytesToHexString(signed.encodeToDER()));
                        }
                        // Scrub the derived key material from memory.
                        key.wipe();
                    } catch (Exception e) {
                        e.printStackTrace();
                        dp.setThread(null);
                        runOnUiThread(new Runnable() {
                            @Override
                            public void run() {
                                dp.dismiss();
                                DropdownMessage.showDropdownMessage(SignTxActivity.this,
                                        R.string.hdm_send_with_cold_no_requested_seed);
                            }
                        });
                        password.wipe();
                        return;
                    }
                } else {
                    // Plain transaction: sign with the address's private key.
                    Address address = WalletUtils.findPrivateKey(qrCodeTransport.getMyAddress());
                    strings = address.signStrHashes(qrCodeTransport.getHashList(), password);
                }
                password.wipe();
                // Join the signatures with the QR-code separator (no trailing split).
                String result = "";
                for (int i = 0; i < strings.size(); i++) {
                    if (i < strings.size() - 1) {
                        result = result + strings.get(i) + QRCodeUtil.QR_CODE_SPLIT;
                    } else {
                        result = result + strings.get(i);
                    }
                }
                final String r = result;
                dp.setThread(null);
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        dp.dismiss();
                        // Show the signed result as a QR code for the hot wallet to scan.
                        Intent intent = new Intent(SignTxActivity.this,
                                BitherQRCodeActivity.class);
                        intent.putExtra(BitherSetting.INTENT_REF.QR_CODE_STRING, r);
                        intent.putExtra(BitherSetting.INTENT_REF.TITLE_STRING,
                                getString(R.string.signed_transaction_qr_code_title));
                        startActivity(intent);
                        finish();
                    }
                });
            }
            ;
        };
        dp.setThread(thread);
        thread.start();
        dp.show();
    }

    @Override
    public void finish() {
        super.finish();
        // Keep the swipe-right exit animation consistent with the rest of the app.
        overridePendingTransition(0, R.anim.slide_out_right);
    }

    /**
     * Launches the QR-code scanner for the unsigned transaction; the result
     * comes back through onActivityResult.
     */
    private void toScanActivity() {
        Intent intent = new Intent(SignTxActivity.this,
                ScanQRCodeTransportActivity.class);
        intent.putExtra(BitherSetting.INTENT_REF.TITLE_STRING,
                getString(R.string.scan_unsigned_transaction_title));
        startActivityForResult(intent, BitherSetting.INTENT_REF.SCAN_REQUEST_CODE);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.io.retry;

import java.io.IOException;
import java.net.ConnectException;
import java.net.NoRouteToHostException;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.concurrent.TimeUnit;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.ipc.RetriableException;
import org.apache.hadoop.ipc.StandbyException;
import org.apache.hadoop.net.ConnectTimeoutException;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.util.Time;

/**
 * <p>
 * A collection of useful implementations of {@link RetryPolicy}.
 * </p>
 */
public class RetryPolicies {

  public static final Log LOG = LogFactory.getLog(RetryPolicies.class);

  // Per-thread Random so the randomized sleep-time calculations do not
  // contend on a single shared instance.
  private static ThreadLocal<Random> RANDOM = new ThreadLocal<Random>() {
    @Override
    protected Random initialValue() {
      return new Random();
    }
  };

  /**
   * <p>
   * Try once, and fail by re-throwing the exception.
   * This corresponds to having no retry mechanism in place.
   * </p>
   */
  public static final RetryPolicy TRY_ONCE_THEN_FAIL = new TryOnceThenFail();

  /**
   * <p>
   * Keep trying forever.
   * </p>
   */
  public static final RetryPolicy RETRY_FOREVER = new RetryForever();

  /**
   * <p>
   * Keep trying a limited number of times, waiting a fixed time between attempts,
   * and then fail by re-throwing the exception.
   * </p>
   */
  public static final RetryPolicy retryUpToMaximumCountWithFixedSleep(int maxRetries, long sleepTime, TimeUnit timeUnit) {
    return new RetryUpToMaximumCountWithFixedSleep(maxRetries, sleepTime, timeUnit);
  }

  /**
   * <p>
   * Keep trying for a maximum time, waiting a fixed time between attempts,
   * and then fail by re-throwing the exception.
   * </p>
   */
  public static final RetryPolicy retryUpToMaximumTimeWithFixedSleep(long maxTime, long sleepTime, TimeUnit timeUnit) {
    return new RetryUpToMaximumTimeWithFixedSleep(maxTime, sleepTime, timeUnit);
  }

  /**
   * <p>
   * Keep trying a limited number of times, waiting a growing amount of time between attempts,
   * and then fail by re-throwing the exception.
   * The time between attempts is <code>sleepTime</code> multiplied by the number of tries so far.
   * </p>
   */
  public static final RetryPolicy retryUpToMaximumCountWithProportionalSleep(int maxRetries, long sleepTime, TimeUnit timeUnit) {
    return new RetryUpToMaximumCountWithProportionalSleep(maxRetries, sleepTime, timeUnit);
  }

  /**
   * <p>
   * Keep trying a limited number of times, waiting a growing amount of time between attempts,
   * and then fail by re-throwing the exception.
   * The time between attempts is <code>sleepTime</code> multiplied by a random
   * number in the range of [0, 2 to the number of retries)
   * </p>
   */
  public static final RetryPolicy exponentialBackoffRetry(
      int maxRetries, long sleepTime, TimeUnit timeUnit) {
    return new ExponentialBackoffRetry(maxRetries, sleepTime, timeUnit);
  }

  /**
   * <p>
   * Set a default policy with some explicit handlers for specific exceptions.
   * </p>
   */
  public static final RetryPolicy retryByException(RetryPolicy defaultPolicy,
                                                   Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap) {
    return new ExceptionDependentRetry(defaultPolicy, exceptionToPolicyMap);
  }

  /**
   * <p>
   * A retry policy for RemoteException
   * Set a default policy with some explicit handlers for specific exceptions.
   * </p>
   */
  public static final RetryPolicy retryByRemoteException(
      RetryPolicy defaultPolicy,
      Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap) {
    return new RemoteExceptionDependentRetry(defaultPolicy, exceptionToPolicyMap);
  }

  /**
   * Failover on network exceptions, failing immediately after
   * {@code maxFailovers} failovers; falls back to TRY_ONCE_THEN_FAIL for
   * non-network exceptions.
   */
  public static final RetryPolicy failoverOnNetworkException(int maxFailovers) {
    return failoverOnNetworkException(TRY_ONCE_THEN_FAIL, maxFailovers);
  }

  /** As above, but with an explicit fallback policy and no failover delay. */
  public static final RetryPolicy failoverOnNetworkException(
      RetryPolicy fallbackPolicy, int maxFailovers) {
    return failoverOnNetworkException(fallbackPolicy, maxFailovers, 0, 0);
  }

  /** As above, with an exponential delay between failovers. */
  public static final RetryPolicy failoverOnNetworkException(
      RetryPolicy fallbackPolicy, int maxFailovers, long delayMillis,
      long maxDelayBase) {
    return new FailoverOnNetworkExceptionRetry(fallbackPolicy, maxFailovers,
        delayMillis, maxDelayBase);
  }

  /** As above, additionally bounding the number of non-failover retries. */
  public static final RetryPolicy failoverOnNetworkException(
      RetryPolicy fallbackPolicy, int maxFailovers, int maxRetries,
      long delayMillis, long maxDelayBase) {
    return new FailoverOnNetworkExceptionRetry(fallbackPolicy, maxFailovers,
        maxRetries, delayMillis, maxDelayBase);
  }

  /** Always fails: the exception propagates to the caller immediately. */
  static class TryOnceThenFail implements RetryPolicy {
    @Override
    public RetryAction shouldRetry(Exception e, int retries, int failovers,
        boolean isIdempotentOrAtMostOnce) throws Exception {
      return RetryAction.FAIL;
    }
  }

  /** Always retries, with no delay and no upper bound on attempts. */
  static class RetryForever implements RetryPolicy {
    @Override
    public RetryAction shouldRetry(Exception e, int retries, int failovers,
        boolean isIdempotentOrAtMostOnce) throws Exception {
      return RetryAction.RETRY;
    }
  }

  /**
   * Retry up to maxRetries.
   * The actual sleep time of the n-th retry is f(n, sleepTime),
   * where f is a function provided by the subclass implementation.
   *
   * The object of the subclasses should be immutable;
   * otherwise, the subclass must override hashCode(), equals(..) and toString().
   */
  static abstract class RetryLimited implements RetryPolicy {
    final int maxRetries;
    final long sleepTime;
    final TimeUnit timeUnit;

    // Lazily-built cache for toString(); also the basis of equals/hashCode.
    private String myString;

    RetryLimited(int maxRetries, long sleepTime, TimeUnit timeUnit) {
      if (maxRetries < 0) {
        throw new IllegalArgumentException("maxRetries = " + maxRetries+" < 0");
      }
      if (sleepTime < 0) {
        throw new IllegalArgumentException("sleepTime = " + sleepTime + " < 0");
      }

      this.maxRetries = maxRetries;
      this.sleepTime = sleepTime;
      this.timeUnit = timeUnit;
    }

    @Override
    public RetryAction shouldRetry(Exception e, int retries, int failovers,
        boolean isIdempotentOrAtMostOnce) throws Exception {
      if (retries >= maxRetries) {
        return RetryAction.FAIL;
      }
      return new RetryAction(RetryAction.RetryDecision.RETRY,
          timeUnit.toMillis(calculateSleepTime(retries)));
    }

    /** Computes the sleep (in this policy's timeUnit) before the given retry. */
    protected abstract long calculateSleepTime(int retries);

    // equals/hashCode are deliberately delegated to toString(), which encodes
    // all the configuration of an (immutable) policy instance.
    @Override
    public int hashCode() {
      return toString().hashCode();
    }

    @Override
    public boolean equals(final Object that) {
      if (this == that) {
        return true;
      } else if (that == null || this.getClass() != that.getClass()) {
        return false;
      }
      return this.toString().equals(that.toString());
    }

    @Override
    public String toString() {
      if (myString == null) {
        myString = getClass().getSimpleName() + "(maxRetries=" + maxRetries
            + ", sleepTime=" + sleepTime + " " + timeUnit + ")";
      }
      return myString;
    }
  }

  /** Fixed sleep between retries. */
  static class RetryUpToMaximumCountWithFixedSleep extends RetryLimited {
    public RetryUpToMaximumCountWithFixedSleep(int maxRetries, long sleepTime, TimeUnit timeUnit) {
      super(maxRetries, sleepTime, timeUnit);
    }

    @Override
    protected long calculateSleepTime(int retries) {
      return sleepTime;
    }
  }

  /**
   * Fixed sleep between retries, bounded both by a derived retry count
   * (maxTime / sleepTime) and by a wall-clock deadline.
   * NOTE(review): the super(...) call divides by sleepTime — a sleepTime of 0
   * would throw ArithmeticException; callers are expected to pass a positive
   * sleepTime.
   */
  static class RetryUpToMaximumTimeWithFixedSleep extends RetryUpToMaximumCountWithFixedSleep {
    // Absolute (monotonic) deadline after which the policy fails.
    private final long timeLimit;
    private String myString;

    public RetryUpToMaximumTimeWithFixedSleep(long maxTime, long sleepTime,
        TimeUnit timeUnit) {
      super((int) (maxTime / sleepTime), sleepTime, timeUnit);
      this.timeLimit = Time.monotonicNow() + timeUnit.toMillis(maxTime);
    }

    @Override
    public RetryAction shouldRetry(Exception e, int retries, int failovers,
        boolean isIdempotentOrAtMostOnce) throws Exception {
      if (Time.monotonicNow() > timeLimit) {
        return RetryAction.FAIL;
      }
      return super.shouldRetry(e, retries, failovers, isIdempotentOrAtMostOnce);
    }

    @Override
    public String toString() {
      if (myString == null) {
        myString = getClass().getSimpleName() + "(maxRetries=" + maxRetries
            + ", sleepTime=" + sleepTime + " " + timeUnit
            + ", timeToFail=" + timeLimit + ")";
      }
      return myString;
    }
  }

  /** Sleep grows linearly: sleepTime * (retries + 1). */
  static class RetryUpToMaximumCountWithProportionalSleep extends RetryLimited {
    public RetryUpToMaximumCountWithProportionalSleep(int maxRetries, long sleepTime, TimeUnit timeUnit) {
      super(maxRetries, sleepTime, timeUnit);
    }

    @Override
    protected long calculateSleepTime(int retries) {
      return sleepTime * (retries + 1);
    }
  }

  /**
   * Given pairs of number of retries and sleep time (n0, t0), (n1, t1), ...,
   * the first n0 retries sleep t0 milliseconds on average,
   * the following n1 retries sleep t1 milliseconds on average, and so on.
   *
   * For all the sleep, the actual sleep time is randomly uniform distributed
   * in the close interval [0.5t, 1.5t], where t is the sleep time specified.
   *
   * The objects of this class are immutable.
   */
  public static class MultipleLinearRandomRetry implements RetryPolicy {
    /** Pairs of numRetries and sleepMillis */
    public static class Pair {
      final int numRetries;
      final int sleepMillis;

      public Pair(final int numRetries, final int sleepMillis) {
        if (numRetries < 0) {
          throw new IllegalArgumentException("numRetries = " + numRetries+" < 0");
        }
        if (sleepMillis < 0) {
          throw new IllegalArgumentException("sleepMillis = " + sleepMillis + " < 0");
        }

        this.numRetries = numRetries;
        this.sleepMillis = sleepMillis;
      }

      @Override
      public String toString() {
        return numRetries + "x" + sleepMillis + "ms";
      }
    }

    private final List<Pair> pairs;
    // Lazily-built cache for toString(); also the basis of equals/hashCode.
    private String myString;

    public MultipleLinearRandomRetry(List<Pair> pairs) {
      if (pairs == null || pairs.isEmpty()) {
        throw new IllegalArgumentException("pairs must be neither null nor empty.");
      }
      this.pairs = Collections.unmodifiableList(pairs);
    }

    @Override
    public RetryAction shouldRetry(Exception e, int curRetry, int failovers,
        boolean isIdempotentOrAtMostOnce) throws Exception {
      final Pair p = searchPair(curRetry);
      if (p == null) {
        //no more retries.
        return RetryAction.FAIL;
      }

      //calculate sleep time and return.
      final double ratio = RANDOM.get().nextDouble() + 0.5;//0.5 <= ratio <=1.5
      final long sleepTime = Math.round(p.sleepMillis * ratio);
      return new RetryAction(RetryAction.RetryDecision.RETRY, sleepTime);
    }

    /**
     * Given the current number of retry, search the corresponding pair.
     * @return the corresponding pair,
     *   or null if the current number of retry > maximum number of retry.
     */
    private Pair searchPair(int curRetry) {
      int i = 0;
      // Walk the pairs, consuming each pair's retry budget from curRetry.
      for(; i < pairs.size() && curRetry > pairs.get(i).numRetries; i++) {
        curRetry -= pairs.get(i).numRetries;
      }
      return i == pairs.size()? null: pairs.get(i);
    }

    @Override
    public int hashCode() {
      return toString().hashCode();
    }

    @Override
    public boolean equals(final Object that) {
      if (this == that) {
        return true;
      } else if (that == null || this.getClass() != that.getClass()) {
        return false;
      }
      return this.toString().equals(that.toString());
    }

    @Override
    public String toString() {
      if (myString == null) {
        myString = getClass().getSimpleName() + pairs;
      }
      return myString;
    }

    /**
     * Parse the given string as a MultipleLinearRandomRetry object.
     * The format of the string is "t_1, n_1, t_2, n_2, ...",
     * where t_i and n_i are the i-th pair of sleep time and number of retries.
     * Note that the white spaces in the string are ignored.
     *
     * @return the parsed object, or null if the parsing fails.
     */
    public static MultipleLinearRandomRetry parseCommaSeparatedString(String s) {
      final String[] elements = s.split(",");
      // NOTE(review): String.split(",") on a non-matching input yields a
      // one-element array, so this branch appears unreachable in practice.
      if (elements.length == 0) {
        LOG.warn("Illegal value: there is no element in \"" + s + "\".");
        return null;
      }
      if (elements.length % 2 != 0) {
        LOG.warn("Illegal value: the number of elements in \"" + s + "\" is "
            + elements.length + " but an even number of elements is expected.");
        return null;
      }

      final List<RetryPolicies.MultipleLinearRandomRetry.Pair> pairs
          = new ArrayList<RetryPolicies.MultipleLinearRandomRetry.Pair>();

      for(int i = 0; i < elements.length; ) {
        //parse the i-th sleep-time
        final int sleep = parsePositiveInt(elements, i++, s);
        if (sleep == -1) {
          return null; //parse fails
        }

        //parse the i-th number-of-retries
        final int retries = parsePositiveInt(elements, i++, s);
        if (retries == -1) {
          return null; //parse fails
        }

        pairs.add(new RetryPolicies.MultipleLinearRandomRetry.Pair(retries, sleep));
      }
      return new RetryPolicies.MultipleLinearRandomRetry(pairs);
    }

    /**
     * Parse the i-th element as an integer.
     * @return -1 if the parsing fails or the parsed value <= 0;
     *   otherwise, return the parsed value.
     */
    private static int parsePositiveInt(final String[] elements,
        final int i, final String originalString) {
      final String s = elements[i].trim();
      final int n;
      try {
        n = Integer.parseInt(s);
      } catch(NumberFormatException nfe) {
        LOG.warn("Failed to parse \"" + s + "\", which is the index " + i
            + " element in \"" + originalString + "\"", nfe);
        return -1;
      }

      if (n <= 0) {
        LOG.warn("The value " + n + " <= 0: it is parsed from the string \""
            + s + "\" which is the index " + i + " element in \""
            + originalString + "\"");
        return -1;
      }
      return n;
    }
  }

  /**
   * Dispatches to a per-exception-class policy (exact class match only),
   * falling back to the default policy when no mapping exists.
   */
  static class ExceptionDependentRetry implements RetryPolicy {

    RetryPolicy defaultPolicy;
    Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap;

    public ExceptionDependentRetry(RetryPolicy defaultPolicy,
                                   Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap) {
      this.defaultPolicy = defaultPolicy;
      this.exceptionToPolicyMap = exceptionToPolicyMap;
    }

    @Override
    public RetryAction shouldRetry(Exception e, int retries, int failovers,
        boolean isIdempotentOrAtMostOnce) throws Exception {
      RetryPolicy policy = exceptionToPolicyMap.get(e.getClass());
      if (policy == null) {
        policy = defaultPolicy;
      }
      return policy.shouldRetry(e, retries, failovers, isIdempotentOrAtMostOnce);
    }
  }

  /**
   * Like ExceptionDependentRetry, but matches on the class name carried inside
   * a RemoteException (the class of the exception thrown on the server side).
   */
  static class RemoteExceptionDependentRetry implements RetryPolicy {

    RetryPolicy defaultPolicy;
    // Keyed by fully-qualified class name, since RemoteException only carries
    // the server-side exception's name, not its Class object.
    Map<String, RetryPolicy> exceptionNameToPolicyMap;

    public RemoteExceptionDependentRetry(RetryPolicy defaultPolicy,
                                         Map<Class<? extends Exception>,
                                         RetryPolicy> exceptionToPolicyMap) {
      this.defaultPolicy = defaultPolicy;
      this.exceptionNameToPolicyMap = new HashMap<String, RetryPolicy>();
      for (Entry<Class<? extends Exception>, RetryPolicy> e :
          exceptionToPolicyMap.entrySet()) {
        exceptionNameToPolicyMap.put(e.getKey().getName(), e.getValue());
      }
    }

    @Override
    public RetryAction shouldRetry(Exception e, int retries, int failovers,
        boolean isIdempotentOrAtMostOnce) throws Exception {
      RetryPolicy policy = null;
      if (e instanceof RemoteException) {
        policy = exceptionNameToPolicyMap.get(
            ((RemoteException) e).getClassName());
      }
      if (policy == null) {
        policy = defaultPolicy;
      }
      return policy.shouldRetry(e, retries, failovers, isIdempotentOrAtMostOnce);
    }
  }

  /** Randomized exponential backoff: see calculateExponentialTime. */
  static class ExponentialBackoffRetry extends RetryLimited {

    public ExponentialBackoffRetry(
        int maxRetries, long sleepTime, TimeUnit timeUnit) {
      super(maxRetries, sleepTime, timeUnit);

      if (maxRetries < 0) {
        throw new IllegalArgumentException("maxRetries = " + maxRetries + " < 0");
      } else if (maxRetries >= Long.SIZE - 1) {
        //calculateSleepTime may overflow.
        throw new IllegalArgumentException("maxRetries = " + maxRetries
            + " >= " + (Long.SIZE - 1));
      }
    }

    @Override
    protected long calculateSleepTime(int retries) {
      return calculateExponentialTime(sleepTime, retries + 1);
    }
  }

  /**
   * Fail over and retry in the case of:
   *   Remote StandbyException (server is up, but is not the active server)
   *   Immediate socket exceptions (e.g. no route to host, econnrefused)
   *   Socket exceptions after initial connection when operation is idempotent
   *
   * The first failover is immediate, while all subsequent failovers wait an
   * exponentially-increasing random amount of time.
   *
   * Fail immediately in the case of:
   *   Socket exceptions after initial connection when operation is not idempotent
   *
   * Fall back on underlying retry policy otherwise.
   */
  static class FailoverOnNetworkExceptionRetry implements RetryPolicy {

    private RetryPolicy fallbackPolicy;
    private int maxFailovers;
    private int maxRetries;
    private long delayMillis;
    private long maxDelayBase;

    public FailoverOnNetworkExceptionRetry(RetryPolicy fallbackPolicy,
        int maxFailovers) {
      this(fallbackPolicy, maxFailovers, 0, 0, 0);
    }

    public FailoverOnNetworkExceptionRetry(RetryPolicy fallbackPolicy,
        int maxFailovers, long delayMillis, long maxDelayBase) {
      this(fallbackPolicy, maxFailovers, 0, delayMillis, maxDelayBase);
    }

    public FailoverOnNetworkExceptionRetry(RetryPolicy fallbackPolicy,
        int maxFailovers, int maxRetries, long delayMillis, long maxDelayBase) {
      this.fallbackPolicy = fallbackPolicy;
      this.maxFailovers = maxFailovers;
      this.maxRetries = maxRetries;
      this.delayMillis = delayMillis;
      this.maxDelayBase = maxDelayBase;
    }

    /**
     * @return 0 if this is our first failover/retry (i.e., retry immediately),
     *         sleep exponentially otherwise
     */
    private long getFailoverOrRetrySleepTime(int times) {
      return times == 0 ? 0 :
        calculateExponentialTime(delayMillis, times, maxDelayBase);
    }

    @Override
    public RetryAction shouldRetry(Exception e, int retries,
        int failovers, boolean isIdempotentOrAtMostOnce) throws Exception {
      if (failovers >= maxFailovers) {
        return new RetryAction(RetryAction.RetryDecision.FAIL, 0,
            "failovers (" + failovers + ") exceeded maximum allowed ("
            + maxFailovers + ")");
      }
      // retries - failovers counts the non-failover retries only.
      if (retries - failovers > maxRetries) {
        return new RetryAction(RetryAction.RetryDecision.FAIL, 0, "retries ("
            + retries + ") exceeded maximum allowed (" + maxRetries + ")");
      }

      if (e instanceof ConnectException ||
          e instanceof NoRouteToHostException ||
          e instanceof UnknownHostException ||
          e instanceof StandbyException ||
          e instanceof ConnectTimeoutException ||
          isWrappedStandbyException(e)) {
        return new RetryAction(RetryAction.RetryDecision.FAILOVER_AND_RETRY,
            getFailoverOrRetrySleepTime(failovers));
      } else if (e instanceof RetriableException
          || getWrappedRetriableException(e) != null) {
        // RetriableException or RetriableException wrapped
        return new RetryAction(RetryAction.RetryDecision.RETRY,
            getFailoverOrRetrySleepTime(retries));
      } else if (e instanceof SocketException ||
                 (e instanceof IOException && !(e instanceof RemoteException))) {
        if (isIdempotentOrAtMostOnce) {
          return RetryAction.FAILOVER_AND_RETRY;
        } else {
          // Connection died mid-call: we cannot tell whether the server
          // executed the (non-idempotent) operation, so we must not retry.
          return new RetryAction(RetryAction.RetryDecision.FAIL, 0,
              "the invoked method is not idempotent, and unable to determine "
              + "whether it was invoked");
        }
      } else {
        return fallbackPolicy.shouldRetry(e, retries, failovers,
            isIdempotentOrAtMostOnce);
      }
    }
  }

  /**
   * Return a value which is <code>time</code> increasing exponentially as a
   * function of <code>retries</code>, +/- 0%-50% of that value, chosen
   * randomly.
   *
   * @param time the base amount of time to work with
   * @param retries the number of retries that have occurred so far
   * @param cap value at which to cap the base sleep time
   * @return an amount of time to sleep
   */
  private static long calculateExponentialTime(long time, int retries,
      long cap) {
    long baseTime = Math.min(time * (1L << retries), cap);
    return (long) (baseTime * (RANDOM.get().nextDouble() + 0.5));
  }

  /** Uncapped variant of the exponential-with-jitter calculation above. */
  private static long calculateExponentialTime(long time, int retries) {
    return calculateExponentialTime(time, retries, Long.MAX_VALUE);
  }

  /** True if e is a RemoteException wrapping a server-side StandbyException. */
  private static boolean isWrappedStandbyException(Exception e) {
    if (!(e instanceof RemoteException)) {
      return false;
    }
    Exception unwrapped = ((RemoteException)e).unwrapRemoteException(
        StandbyException.class);
    return unwrapped instanceof StandbyException;
  }

  /**
   * @return the wrapped RetriableException if e is a RemoteException carrying
   *         one; null otherwise.
   */
  private static RetriableException getWrappedRetriableException(Exception e) {
    if (!(e instanceof RemoteException)) {
      return null;
    }
    Exception unwrapped = ((RemoteException)e).unwrapRemoteException(
        RetriableException.class);
    return unwrapped instanceof RetriableException ?
        (RetriableException) unwrapped : null;
  }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.editorActions;

import com.intellij.application.options.editor.WebEditorOptions;
import com.intellij.codeInsight.lookup.LookupManager;
import com.intellij.codeInsight.lookup.impl.LookupImpl;
import com.intellij.codeInspection.htmlInspections.RenameTagBeginOrEndIntentionAction;
import com.intellij.ide.plugins.DynamicPluginListener;
import com.intellij.ide.plugins.IdeaPluginDescriptor;
import com.intellij.lang.Language;
import com.intellij.lang.html.HTMLLanguage;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.lang.xhtml.XHTMLLanguage;
import com.intellij.lang.xml.XMLLanguage;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandEvent;
import com.intellij.openapi.command.CommandListener;
import com.intellij.openapi.command.undo.UndoManager;
import com.intellij.openapi.diagnostic.Attachment;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.event.DocumentListener;
import com.intellij.openapi.editor.event.EditorFactoryEvent;
import com.intellij.openapi.editor.event.EditorFactoryListener;
import com.intellij.openapi.editor.impl.EditorImpl;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Couple;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.core.impl.PomModelImpl;
import com.intellij.psi.*;
import com.intellij.psi.impl.PsiDocumentManagerBase;
import com.intellij.psi.impl.source.tree.LeafPsiElement;
import com.intellij.psi.impl.source.tree.TreeUtil;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.psi.templateLanguages.OuterLanguageElement;
import com.intellij.psi.templateLanguages.TemplateLanguage;
import com.intellij.psi.templateLanguages.TemplateLanguageUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.xml.XmlTag;
import com.intellij.psi.xml.XmlToken;
import com.intellij.psi.xml.XmlTokenType;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.xml.XmlExtension;
import com.intellij.xml.util.XmlUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.Objects;
import java.util.Set;
import java.util.stream.Stream;

import static com.intellij.util.ObjectUtils.doIfNotNull;
import static com.intellij.util.ObjectUtils.tryCast;

/**
 * Keeps the opening and closing tag names of an XML-like tag in sync while the
 * user edits one of them: a per-caret pair of range markers (leader = the name
 * being edited, support = the matching other side) is maintained on document
 * changes, and the support range is rewritten to match the leader when a
 * command finishes.
 */
public final class XmlTagNameSynchronizer implements EditorFactoryListener {
  // When set on a document, tag-sync skips changes made inside that command
  // (used by completion/auto-import, see runWithoutCancellingSyncTagsEditing).
  private static final Key<Boolean> SKIP_COMMAND = Key.create("tag.name.synchronizer.skip.command");
  private static final Logger LOG = Logger.getInstance(XmlTagNameSynchronizer.class);
  private static final Set<Language> SUPPORTED_LANGUAGES = Set.of(HTMLLanguage.INSTANCE,
                                                                  XMLLanguage.INSTANCE,
                                                                  XHTMLLanguage.INSTANCE);

  // One synchronizer instance per editor, stored as editor user data.
  private static final Key<TagNameSynchronizer> SYNCHRONIZER_KEY = Key.create("tag_name_synchronizer");

  private XmlTagNameSynchronizer() {}

  /**
   * Attaches a {@link TagNameSynchronizer} to the editor when the underlying
   * file's view provider contains an XML-like language; otherwise does nothing.
   */
  private static void createSynchronizerFor(Editor editor) {
    Project project = editor.getProject();
    if (project == null || !(editor instanceof EditorImpl)) {
      return;
    }
    Document document = editor.getDocument();
    VirtualFile file = FileDocumentManager.getInstance().getFile(document);
    Language language = findXmlLikeLanguage(project, file);
    if (language != null) {
      new TagNameSynchronizer((EditorImpl)editor, project, language).listenForDocumentChanges();
    }
  }

  /**
   * Drops and re-creates the synchronizer of every open editor; invoked on
   * dynamic plugin load/unload so stale classes are not retained.
   */
  private static void recreateSynchronizers() {
    for (Editor editor : EditorFactory.getInstance().getAllEditors()) {
      TagNameSynchronizer synchronizer = editor.getUserData(SYNCHRONIZER_KEY);
      if (synchronizer != null) {
        Disposer.dispose(synchronizer);
      }
      createSynchronizerFor(editor);
    }
  }

  /**
   * All synchronizers attached to editors showing {@code document}; empty when
   * the sync-tag-editing option is off or the document is null.
   */
  private static @NotNull Stream<TagNameSynchronizer> findSynchronizers(@Nullable Document document) {
    if (document == null || !WebEditorOptions.getInstance().isSyncTagEditing()) {
      return Stream.empty();
    }
    return EditorFactory.getInstance().editors(document, null)
      .map(editor -> editor.getUserData(SYNCHRONIZER_KEY))
      .filter(Objects::nonNull);
  }

  /**
   * Returns the first language in the file's view provider that is (or derives
   * from) HTML/XML/XHTML, or for which typed-handler support is declared.
   * Returns null when the file is absent, invalid, or not XML-like.
   */
  private static Language findXmlLikeLanguage(Project project, VirtualFile file) {
    final PsiFile psiFile = file != null && file.isValid() ? PsiManager.getInstance(project).findFile(file) : null;
    if (psiFile != null) {
      for (Language language : psiFile.getViewProvider().getLanguages()) {
        // NOTE(review): '&&' binds tighter than '||' here — a template language is
        // excluded from the first clause but still accepted when typed-handler
        // support is declared; confirm this precedence is intentional.
        if ((ContainerUtil.find(SUPPORTED_LANGUAGES, language::isKindOf) != null) &&
            !(language instanceof TemplateLanguage) ||
            XmlTypedHandlersAdditionalSupport.supportsTypedHandlers(psiFile, language)) {
          return language;
        }
      }
    }
    return null;
  }

  /**
   * Runs {@code runnable} with tag-name synchronization suppressed for
   * {@code document} (changes made inside it will not cancel or trigger sync).
   */
  public static void runWithoutCancellingSyncTagsEditing(@NotNull Document document, @NotNull Runnable runnable) {
    document.putUserData(SKIP_COMMAND, Boolean.TRUE);
    try {
      runnable.run();
    }
    finally {
      document.putUserData(SKIP_COMMAND, null);
    }
  }

  /** Editor-factory hook: installs a synchronizer on every newly created editor. */
  public static class MyEditorFactoryListener implements EditorFactoryListener {
    @Override
    public void editorCreated(@NotNull EditorFactoryEvent event) {
      createSynchronizerFor(event.getEditor());
    }
  }

  /** Applies pending leader->support renames just before each command finishes. */
  static final class MyCommandListener implements CommandListener {
    @Override
    public void beforeCommandFinished(@NotNull CommandEvent event) {
      findSynchronizers(event.getDocument()).forEach(synchronizer -> synchronizer.beforeCommandFinished());
    }
  }

  /** Rebuilds all synchronizers when plugins are (un)loaded dynamically. */
  public static class MyDynamicPluginListener implements DynamicPluginListener {
    @Override
    public void pluginLoaded(@NotNull IdeaPluginDescriptor pluginDescriptor) {
      recreateSynchronizers();
    }

    @Override
    public void pluginUnloaded(@NotNull IdeaPluginDescriptor pluginDescriptor, boolean isUpdate) {
      recreateSynchronizers();
    }
  }

  /**
   * Per-editor document listener that tracks the tag name being edited (the
   * "leader" marker) and its counterpart (the "support" marker) for each caret,
   * then copies the leader text onto the support range when the command ends.
   */
  private static final class TagNameSynchronizer implements DocumentListener, Disposable {
    // Caret-scoped pair: first = leader (edited name), second = support (other side).
    private static final Key<Couple<RangeMarker>> MARKERS_KEY = Key.create("tag.name.synchronizer.markers");
    private final PsiDocumentManagerBase myDocumentManager;
    private final Language myLanguage;
    private final EditorImpl myEditor;
    private final Project myProject;
    // True while beforeCommandFinished() is replaying the rename, so that our own
    // document change does not re-enter beforeDocumentChange().
    private boolean myApplying;

    private TagNameSynchronizer(EditorImpl editor, Project project, Language language) {
      myEditor = editor;
      myLanguage = language;
      myDocumentManager = (PsiDocumentManagerBase)PsiDocumentManager.getInstance(project);
      myProject = project;
    }

    @Override
    public void dispose() {
      myEditor.putUserData(SYNCHRONIZER_KEY, null);
    }

    /** Registers this synchronizer on the editor and its document; disposed with the editor. */
    private void listenForDocumentChanges() {
      Disposer.register(myEditor.getDisposable(), this);
      myEditor.getDocument().addDocumentListener(this, this);
      myEditor.putUserData(SYNCHRONIZER_KEY, this);
    }

    /**
     * On each document change inside a tag name: establish (or invalidate) the
     * leader/support marker pair for the current caret. The actual support-side
     * rename is deferred to {@link #beforeCommandFinished()}.
     */
    @Override
    public void beforeDocumentChange(@NotNull DocumentEvent event) {
      if (!WebEditorOptions.getInstance().isSyncTagEditing()) return;

      final Document document = event.getDocument();
      Project project = Objects.requireNonNull(myEditor.getProject());
      // Skip while we apply our own change, during undo, when PSI changes are
      // forbidden, or during bulk updates — syncing there would be unsafe.
      if (myApplying || project.isDefault() || UndoManager.getInstance(project).isUndoInProgress() ||
          !PomModelImpl.isAllowPsiModification() || document.isInBulkUpdate()) {
        return;
      }

      final int offset = event.getOffset();
      final int oldLength = event.getOldLength();
      final CharSequence fragment = event.getNewFragment();
      final int newLength = event.getNewLength();

      if (document.getUserData(SKIP_COMMAND) == Boolean.TRUE) {
        // xml completion inserts extra space after tag name to ensure correct parsing
        // js auto-import may change beginning of the document when component is imported
        // we need to ignore it
        return;
      }

      Caret caret = myEditor.getCaretModel().getCurrentCaret();

      // Any inserted character that cannot appear in a tag name ends the sync.
      for (int i = 0; i < newLength; i++) {
        if (!isValidTagNameChar(fragment.charAt(i))) {
          clearMarkers(caret);
          return;
        }
      }

      Couple<RangeMarker> markers = caret.getUserData(MARKERS_KEY);
      if (markers != null && !fitsInMarker(markers, offset, oldLength)) {
        // Edit fell outside the tracked leader range: start over.
        clearMarkers(caret);
        markers = null;
      }

      if (markers == null) {
        final PsiFile file = myDocumentManager.getPsiFile(document);
        if (file == null || myDocumentManager.getSynchronizer().isInSynchronization(document)) return;

        final RangeMarker leader = createTagNameMarker(caret);
        if (leader == null) return;
        leader.setGreedyToLeft(true);
        leader.setGreedyToRight(true);

        // findSupport needs committed PSI to locate the matching tag side.
        if (myDocumentManager.isUncommited(document)) {
          myDocumentManager.commitDocument(document);
        }

        final RangeMarker support = findSupport(leader, file, document);
        if (support == null) return;
        support.setGreedyToLeft(true);
        support.setGreedyToRight(true);
        markers = Couple.of(leader, support);
        if (!fitsInMarker(markers, offset, oldLength)) return;
        caret.putUserData(MARKERS_KEY, markers);
      }
    }

    /** True when the change [offset, offset+oldLength) lies entirely inside the leader marker. */
    private static boolean fitsInMarker(Couple<RangeMarker> markers, int offset, int oldLength) {
      RangeMarker leader = markers.first;
      return leader.isValid() && offset >= leader.getStartOffset() && offset + oldLength <= leader.getEndOffset();
    }

    /** Disposes and forgets the caret's marker pair, if any. */
    private static void clearMarkers(Caret caret) {
      Couple<RangeMarker> markers = caret.getUserData(MARKERS_KEY);
      if (markers != null) {
        markers.first.dispose();
        markers.second.dispose();
        caret.putUserData(MARKERS_KEY, null);
      }
    }

    /**
     * Creates a marker over the tag name surrounding the caret by scanning at
     * most 50 chars left for '<' or '</' and at most 50 chars right for the
     * first non-name character. A second ':' (after one already seen) also
     * terminates the name. Returns null when no tag name encloses the caret.
     */
    private RangeMarker createTagNameMarker(Caret caret) {
      final int offset = caret.getOffset();
      final Document document = myEditor.getDocument();
      final CharSequence sequence = document.getCharsSequence();
      int start = -1;
      boolean seenColon = false;
      for (int i = offset - 1; i >= Math.max(0, offset - 50); i--) {
        try {
          final char c = sequence.charAt(i);
          if (c == '<' || c == '/' && i > 0 && sequence.charAt(i - 1) == '<') {
            start = i + 1;
            break;
          }
          if (!isValidTagNameChar(c)) break;
          seenColon |= c == ':';
        }
        catch (IndexOutOfBoundsException e) {
          // Diagnostic guard: should not happen given the bounds above; attach the
          // document text so the failure can be analyzed from the error report.
          LOG.error("incorrect offset:" + i + ", initial: " + offset, new Attachment("document.txt", sequence.toString()));
          return null;
        }
      }
      if (start < 0) return null;
      int end = -1;
      for (int i = offset; i < Math.min(document.getTextLength(), offset + 50); i++) {
        final char c = sequence.charAt(i);
        if (!isValidTagNameChar(c) || seenColon && c == ':') {
          end = i;
          break;
        }
        seenColon |= c == ':';
      }
      if (end < 0 || start > end) return null;
      return document.createRangeMarker(start, end, true);
    }

    /**
     * For each caret with a valid marker pair, copies the leader's current text
     * over the support range (inside a write action; routed through the active
     * lookup's guarded-change mechanism when completion is showing).
     */
    void beforeCommandFinished() {
      CaretAction action = caret -> {
        Couple<RangeMarker> markers = caret.getUserData(MARKERS_KEY);
        if (markers == null || !markers.first.isValid() || !markers.second.isValid()) return;
        final Document document = myEditor.getDocument();
        final Runnable apply = () -> {
          final RangeMarker leader = markers.first;
          final RangeMarker support = markers.second;
          if (document.getTextLength() < leader.getEndOffset()) {
            return;
          }
          final String name = document.getText(new TextRange(leader.getStartOffset(), leader.getEndOffset()));
          if (document.getTextLength() >= support.getEndOffset() &&
              !name.equals(document.getText(new TextRange(support.getStartOffset(), support.getEndOffset())))) {
            document.replaceString(support.getStartOffset(), support.getEndOffset(), name);
          }
        };
        ApplicationManager.getApplication().runWriteAction(() -> {
          final LookupImpl lookup = (LookupImpl)LookupManager.getActiveLookup(myEditor);
          if (lookup != null) {
            lookup.performGuardedChange(apply);
          }
          else {
            apply.run();
          }
        });
      };
      // myApplying suppresses re-entry into beforeDocumentChange for our own edit.
      myApplying = true;
      try {
        if (myEditor.getCaretModel().isIteratingOverCarets()) {
          action.perform(myEditor.getCaretModel().getCurrentCaret());
        }
        else {
          myEditor.getCaretModel().runForEachCaret(action);
        }
      }
      finally {
        myApplying = false;
      }
    }

    /**
     * Finds the marker for the matching other side of the tag: first via PSI at
     * the leader offset (falling back to the language-specific tree for
     * multi-language view providers), then via the empty-tag heuristics.
     */
    private RangeMarker findSupport(RangeMarker leader, PsiFile file, Document document) {
      final TextRange leaderRange = new TextRange(leader.getStartOffset(), leader.getEndOffset());
      final int offset = leader.getStartOffset();
      PsiElement element = findNameElement(InjectedLanguageUtil.findElementAtNoCommit(file, offset));
      TextRange support = findSupportRange(element);
      if (!isSupportRangeValid(document, leaderRange, support) &&
          file.getViewProvider() instanceof MultiplePsiFilesPerDocumentFileViewProvider) {
        element = findNameElement(file.getViewProvider().findElementAt(offset, myLanguage));
        support = findSupportRange(element);
      }

      if (!isSupportRangeValid(document, leaderRange, support)) return findSupportForEmptyTag(leader, element, document);
      return document.createRangeMarker(support.getStartOffset(), support.getEndOffset(), true);
    }

    /** Steps over outer-language (template) elements to the same-language neighbor. */
    private static PsiElement findNameElement(@Nullable PsiElement element) {
      return element instanceof OuterLanguageElement ? TemplateLanguageUtil.getSameLanguageTreeNext(element) : element;
    }

    /** A char is a valid tag-name char per XML rules or per the file's XmlExtension. */
    private boolean isValidTagNameChar(char c) {
      if (XmlUtil.isValidTagNameChar(c)) return true;
      final XmlExtension extension = getXmlExtension();
      if (extension == null) return false;
      return extension.isValidTagNameChar(c);
    }

    /** XmlExtension for the editor's file, or null when the file is unavailable. */
    @Nullable
    private XmlExtension getXmlExtension() {
      Document document = myEditor.getDocument();
      VirtualFile file = FileDocumentManager.getInstance().getFile(document);
      PsiFile psiFile = file != null && file.isValid() ? PsiManager.getInstance(myProject).findFile(file) : null;
      if (psiFile == null) {
        return null;
      }
      return XmlExtension.getExtension(psiFile);
    }

    /**
     * Heuristic support lookup for tags whose name is currently empty (e.g.
     * JSX "<></>" or a just-typed "<>"): pairs the zero-length leader with the
     * corresponding position in the other side, or with itself as a safe no-op.
     * Only applies to a zero-length leader not at document start.
     */
    private static RangeMarker findSupportForEmptyTag(RangeMarker leader, PsiElement element, Document document) {
      int offset = leader.getStartOffset();
      if (offset != leader.getEndOffset() || element == null || offset == 0) return null;
      XmlTag tag = tryCast(element.getParent(), XmlTag.class);
      CharSequence contents = document.getCharsSequence();
      if (tag != null && tag.getName().isEmpty()) {
        // Support JSX empty tags
        PsiElement startTag = tag.getFirstChild();
        PsiElement endTag = tag.getLastChild();
        if (endTag instanceof XmlToken && endTag.getNode().getElementType() == XmlTokenType.XML_TAG_END) {
          endTag = PsiTreeUtil.skipWhitespacesBackward(endTag);
        }
        if (startTag != endTag && startTag instanceof LeafPsiElement && endTag instanceof LeafPsiElement) {
          int startTagOffset = startTag.getNode().getStartOffset();
          int endTagOffset = endTag.getNode().getStartOffset();
          if (startTagOffset + 1 == offset) {
            // Caret is right after the opening '<': support goes after "</".
            return contents.charAt(endTagOffset) == '<' && contents.charAt(endTagOffset + 1) == '/'
                   ? document.createRangeMarker(endTagOffset + 2, endTagOffset + 2, true)
                   : null;
          }
          else if (endTagOffset + 2 == offset) {
            // Caret is right after "</": support goes after the opening '<'.
            return contents.charAt(startTagOffset) == '<'
                   ? document.createRangeMarker(startTagOffset + 1, startTagOffset + 1, true)
                   : null;
          }
        }
        return null;
      }
      TextRange range = doIfNotNull(doIfNotNull(element.getParent(), PsiElement::getParent), PsiElement::getTextRange);
      if (range == null) return null;
      int length = contents.length();
      char prev = contents.charAt(offset - 1);
      if (prev == '<') {
        int nextCharPos = StringUtil.skipWhitespaceForward(contents, offset);
        if (nextCharPos >= 0 && nextCharPos < length && contents.charAt(nextCharPos) == '>') {
          // Try to find matching empty closing tag
          int endTagStart = StringUtil.indexOf(contents, '<', nextCharPos, range.getEndOffset());
          int endTagEnd = endTagStart > 0 && endTagStart < length - 1 && contents.charAt(endTagStart + 1) == '/'
                          ? StringUtil.skipWhitespaceForward(contents, endTagStart + 2)
                          : -1;
          if (endTagEnd > 0 && endTagEnd < length && contents.charAt(endTagEnd) == '>') {
            // Sync with it
            return document.createRangeMarker(endTagStart + 2, endTagStart + 2, true);
          }
          else {
            // Otherwise create a self pointing sync to avoid code destruction
            return document.createRangeMarker(offset, offset, true);
          }
        }
      }
      else if (prev == '/' && offset >= 2 && contents.charAt(offset - 2) == '<') {
        int nextCharPos = StringUtil.skipWhitespaceForward(contents, offset);
        if (nextCharPos >= 0 && nextCharPos < length && contents.charAt(nextCharPos) == '>') {
          // Try to find matching empty opening tag
          int startTagEnd = StringUtil.lastIndexOf(contents, '>', range.getStartOffset(), offset - 2);
          int startTagStart = startTagEnd > 0 ? StringUtil.skipWhitespaceBackward(contents, startTagEnd) - 1 : -1;
          if (startTagStart > 0 && startTagStart < length && contents.charAt(startTagStart) == '<') {
            return document.createRangeMarker(startTagStart + 1, startTagStart + 1, true);
          }
          else {
            // Otherwise create a self pointing sync to avoid code destruction
            return document.createRangeMarker(offset, offset, true);
          }
        }
      }
      return null;
    }

    /** Support range is usable only when its current text equals the leader's. */
    private static boolean isSupportRangeValid(@NotNull Document document, @NotNull TextRange leader, @Nullable TextRange support) {
      if (support == null) return false;
      return document.getText(leader).equals(document.getText(support));
    }

    /**
     * Locates the other side of the tag via RenameTagBeginOrEndIntentionAction
     * (trying both directions) and converts its range to host-document
     * coordinates; null when the tag has no XML_TAG_END sibling or no pair.
     */
    @Nullable
    private static TextRange findSupportRange(@Nullable PsiElement leader) {
      if (leader == null || TreeUtil.findSibling(leader.getNode(), XmlTokenType.XML_TAG_END) == null) return null;
      PsiElement support = RenameTagBeginOrEndIntentionAction.findOtherSide(leader, false);
      if (support == null || leader == support) support = RenameTagBeginOrEndIntentionAction.findOtherSide(leader, true);
      if (support == null) return null;
      final int start = findSupportRangeStart(support);
      final int end = findSupportRangeEnd(support);
      final TextRange supportRange = TextRange.create(start, end);
      return InjectedLanguageManager.getInstance(leader.getProject()).injectedToHost(leader.getContainingFile(), supportRange);
    }

    /** Extends the range start over any preceding outer-language (template) elements. */
    private static int findSupportRangeStart(@NotNull PsiElement support) {
      PsiElement current = support;
      while (current.getPrevSibling() instanceof OuterLanguageElement) {
        current = current.getPrevSibling();
      }
      return current.getTextRange().getStartOffset();
    }

    /** Extends the range end over any following outer-language (template) elements. */
    private static int findSupportRangeEnd(@NotNull PsiElement support) {
      PsiElement current = support;
      while (current.getNextSibling() instanceof OuterLanguageElement) {
        current = current.getNextSibling();
      }
      return current.getTextRange().getEndOffset();
    }
  }
}
package com.swfarm.biz.amazon.bo; import java.io.Serializable; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; import com.swfarm.biz.oa.bo.User; import com.swfarm.pub.utils.NumberUtils; public class AmazonProductGroup implements Serializable { private static final long serialVersionUID = 1L; private Long id; private String name; private String description; private String accountNumbers; private String sellingSites; private String sellingCurrencies; private Integer criteriaLowerOneDaySaleQuantity; private Integer criteriaLowerThreeDaysSaleQuantity; private Integer criteriaLowerSevenDaysSaleQuantity; private Integer criteriaLowerThirtyDaysSaleQuantity; private Integer criteriaHigherOneDaySaleQuantity; private Integer criteriaHigherThreeDaysSaleQuantity; private Integer criteriaHigherSevenDaysSaleQuantity; private Integer criteriaHigherThirtyDaysSaleQuantity; private String asins; private Set amazonProducts = new HashSet(); private Integer amazonProductCount; private User groupOwner; private Long groupOwnerId; private Double oneDaySaleTrend; private Double threeDaysSaleTrend; private Double sevenDaysSaleTrend; private Integer yesterdaySaleQuantity; private Integer threeDaysSaleQuantity; private Integer sevenDaysSaleQuantity; private Integer thirtyDaysSaleQuantity; private Double decreasePricePercentage; private Double increasePricePercentage; private Date enrollDate; private Boolean isLocked; private Boolean isAutoAdjust; public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getDescription() { return description; } public void 
setDescription(String comments) { this.description = comments; } public String getAccountNumbers() { return accountNumbers; } public List getAccountNumberList() { List accountNumberList = new ArrayList(); if (StringUtils.isNotEmpty(accountNumbers)) { String[] accountNumberArr = StringUtils.split(accountNumbers, ","); if (!ArrayUtils.isEmpty(accountNumberArr)) { CollectionUtils.addAll(accountNumberList, accountNumberArr); } } return accountNumberList; } public void setAccountNumberList(List accountNumberList) { if (CollectionUtils.isNotEmpty(accountNumberList)) { this.accountNumbers = StringUtils.join(accountNumberList, ","); } else { this.accountNumbers = null; } } public void addAccountNumber(String accountNumber) { List accountNumberList = this.getAccountNumberList(); if (!accountNumberList.contains(accountNumber)) { accountNumberList.add(accountNumber); this.setAccountNumberList(accountNumberList); } } public void removeAccountNumber(String accountNumber) { List accountNumberList = this.getAccountNumberList(); if (accountNumberList.contains(accountNumber)) { accountNumberList.remove(accountNumber); this.setAccountNumberList(accountNumberList); } } public void setAccountNumbers(String accountNumbers) { this.accountNumbers = accountNumbers; } public String getSellingSites() { return sellingSites; } public List getSellingSiteList() { List sellingSiteList = new ArrayList(); if (StringUtils.isNotEmpty(sellingSites)) { String[] sellingSiteArr = StringUtils.split(sellingSites, ","); if (!ArrayUtils.isEmpty(sellingSiteArr)) { CollectionUtils.addAll(sellingSiteList, sellingSiteArr); } } return sellingSiteList; } public void setSellingSiteList(List sellingSiteList) { if (CollectionUtils.isNotEmpty(sellingSiteList)) { this.sellingSites = StringUtils.join(sellingSiteList, ","); } else { this.sellingSites = null; } } public void addSellingSite(String sellingSite) { List sellingSiteList = this.getSellingSiteList(); if (!sellingSiteList.contains(sellingSite)) { 
sellingSiteList.add(sellingSite); this.setSellingSiteList(sellingSiteList); } } public void removeSellingSite(String sellingSite) { List sellingSiteList = this.getSellingSiteList(); if (sellingSiteList.contains(sellingSite)) { sellingSiteList.remove(sellingSite); this.setSellingSiteList(sellingSiteList); } } public void setSellingSites(String sellingSites) { this.sellingSites = sellingSites; } public String getSellingCurrencies() { return sellingCurrencies; } public List getSellingCurrencyList() { List sellingCurrencyList = new ArrayList(); if (StringUtils.isNotEmpty(sellingCurrencies)) { String[] sellingCurrencyArr = StringUtils.split(sellingCurrencies, ","); if (!ArrayUtils.isEmpty(sellingCurrencyArr)) { CollectionUtils.addAll(sellingCurrencyList, sellingCurrencyArr); } } return sellingCurrencyList; } public void setSellingCurrencyList(List sellingCurrencyList) { if (CollectionUtils.isNotEmpty(sellingCurrencyList)) { this.sellingCurrencies = StringUtils.join(sellingCurrencyList, ","); } else { this.sellingCurrencies = null; } } public void addSellingCurrency(String sellingCurrency) { List sellingCurrencyList = this.getSellingCurrencyList(); if (!sellingCurrencyList.contains(sellingCurrency)) { sellingCurrencyList.add(sellingCurrency); this.setSellingCurrencyList(sellingCurrencyList); } } public void removeSellingCurrency(String sellingCurrency) { List sellingCurrencyList = this.getSellingCurrencyList(); if (sellingCurrencyList.contains(sellingCurrency)) { sellingCurrencyList.remove(sellingCurrency); this.setSellingCurrencyList(sellingCurrencyList); } } public void setSellingCurrencies(String sellingCurrencies) { this.sellingCurrencies = sellingCurrencies; } public Integer getCriteriaLowerOneDaySaleQuantity() { return criteriaLowerOneDaySaleQuantity; } public void setCriteriaLowerOneDaySaleQuantity( Integer criteriaLowerOneDaySaleQuantity) { this.criteriaLowerOneDaySaleQuantity = criteriaLowerOneDaySaleQuantity; } public Integer 
getCriteriaLowerThreeDaysSaleQuantity() { return criteriaLowerThreeDaysSaleQuantity; } public void setCriteriaLowerThreeDaysSaleQuantity( Integer criteriaLowerThreeDaysSaleQuantity) { this.criteriaLowerThreeDaysSaleQuantity = criteriaLowerThreeDaysSaleQuantity; } public Integer getCriteriaLowerSevenDaysSaleQuantity() { return criteriaLowerSevenDaysSaleQuantity; } public void setCriteriaLowerSevenDaysSaleQuantity( Integer criteriaLowerSevenDaysSaleQuantity) { this.criteriaLowerSevenDaysSaleQuantity = criteriaLowerSevenDaysSaleQuantity; } public Integer getCriteriaLowerThirtyDaysSaleQuantity() { return criteriaLowerThirtyDaysSaleQuantity; } public void setCriteriaLowerThirtyDaysSaleQuantity( Integer criteriaLowerThirtyDaysSaleQuantity) { this.criteriaLowerThirtyDaysSaleQuantity = criteriaLowerThirtyDaysSaleQuantity; } public Integer getCriteriaHigherOneDaySaleQuantity() { return criteriaHigherOneDaySaleQuantity; } public void setCriteriaHigherOneDaySaleQuantity( Integer criteriaHigherOneDaySaleQuantity) { this.criteriaHigherOneDaySaleQuantity = criteriaHigherOneDaySaleQuantity; } public Integer getCriteriaHigherThreeDaysSaleQuantity() { return criteriaHigherThreeDaysSaleQuantity; } public void setCriteriaHigherThreeDaysSaleQuantity( Integer criteriaHigherThreeDaysSaleQuantity) { this.criteriaHigherThreeDaysSaleQuantity = criteriaHigherThreeDaysSaleQuantity; } public Integer getCriteriaHigherSevenDaysSaleQuantity() { return criteriaHigherSevenDaysSaleQuantity; } public void setCriteriaHigherSevenDaysSaleQuantity( Integer criteriaHigherSevenDaysSaleQuantity) { this.criteriaHigherSevenDaysSaleQuantity = criteriaHigherSevenDaysSaleQuantity; } public Integer getCriteriaHigherThirtyDaysSaleQuantity() { return criteriaHigherThirtyDaysSaleQuantity; } public void setCriteriaHigherThirtyDaysSaleQuantity( Integer criteriaHigherThirtyDaysSaleQuantity) { this.criteriaHigherThirtyDaysSaleQuantity = criteriaHigherThirtyDaysSaleQuantity; } public Double getDecreasePricePercentage() 
{ if (decreasePricePercentage == null) { decreasePricePercentage = 0d; } return decreasePricePercentage; } public void setDecreasePricePercentage(Double decreasePricePercentage) { this.decreasePricePercentage = decreasePricePercentage; } public Double getIncreasePricePercentage() { if (increasePricePercentage == null) { increasePricePercentage = 0d; } return increasePricePercentage; } public void setIncreasePricePercentage(Double increasePricePercentage) { this.increasePricePercentage = increasePricePercentage; } public String getAsins() { return asins; } public List getAsinList() { List asinList = new ArrayList(); if (CollectionUtils.isNotEmpty(this.getAmazonProducts())) { for (Iterator iter = amazonProducts.iterator(); iter.hasNext();) { AmazonProduct amazonProduct = (AmazonProduct) iter.next(); String asin = amazonProduct.getAsin(); if (!asinList.contains(asin)) { asinList.add(asin); } } Collections.sort(asinList); } return asinList; } public String getAsinsStr() { List asinList = this.getAsinList(); if (CollectionUtils.isNotEmpty(asinList)) { return StringUtils.join(asinList, ","); } return null; } public void setAsins(String asins) { this.asins = asins; } public Integer getAmazonProductCount() { if (amazonProductCount == null) { amazonProductCount = 0; } return amazonProductCount; } public void setAmazonProductCount(Integer amazonProductCount) { this.amazonProductCount = amazonProductCount; } public Set getAmazonProducts() { if (amazonProducts == null) { amazonProducts = new HashSet(); } return amazonProducts; } public void removeAmazonProduct(String asin) { for (Iterator iter = this.getAmazonProducts().iterator(); iter .hasNext();) { AmazonProduct amazonProduct = (AmazonProduct) iter.next(); if (amazonProduct.getAsin().equals(asin)) { iter.remove(); } } } public List getAmazonProductList() { List amazonProductList = new ArrayList(); amazonProductList.addAll(this.getAmazonProducts()); if (CollectionUtils.isNotEmpty(amazonProductList)) { 
Collections.sort(amazonProductList, new Comparator() { @Override public int compare(Object obj1, Object obj2) { AmazonProduct amazonProduct1 = (AmazonProduct) obj1; AmazonProduct amazonProduct2 = (AmazonProduct) obj2; return amazonProduct1.getAsin().compareTo( amazonProduct2.getAsin()); } }); } return amazonProductList; } public void clearAmazonProducts() { this.getAmazonProducts().clear(); } public void setAmazonProducts(Set amazonProducts) { this.amazonProducts = amazonProducts; } public User getGroupOwner() { return groupOwner; } public void setGroupOwner(User groupOwner) { this.groupOwner = groupOwner; } public Long getGroupOwnerId() { return groupOwnerId; } public void setGroupOwnerId(Long groupOwnerId) { this.groupOwnerId = groupOwnerId; } public Double getOneDaySaleTrend() { this.calOneDaySaleTrend(); return oneDaySaleTrend; } public void calOneDaySaleTrend() { if (this.getThreeDaysSaleQuantity() > 0) { this.oneDaySaleTrend = NumberUtils.round( this.getYesterdaySaleQuantity() * 100 / this.getThreeDaysSaleQuantity() * 0.01, 2); } } public void setOneDaySaleTrend(Double oneDaySaleTrend) { this.oneDaySaleTrend = oneDaySaleTrend; } public Double getThreeDaysSaleTrend() { this.calThreeDaysSaleTrend(); return threeDaysSaleTrend; } public void calThreeDaysSaleTrend() { if (this.getSevenDaysSaleQuantity() > 0) { this.threeDaysSaleTrend = NumberUtils.round( this.getThreeDaysSaleQuantity() * 100 / this.getSevenDaysSaleQuantity() * 0.01, 2); } } public void setThreeDaysSaleTrend(Double threeDaysSaleTrend) { this.threeDaysSaleTrend = threeDaysSaleTrend; } public Double getSevenDaysSaleTrend() { this.calSevenDaysSaleTrend(); return sevenDaysSaleTrend; } public void calSevenDaysSaleTrend() { if (this.getThirtyDaysSaleQuantity() > 0) { this.sevenDaysSaleTrend = NumberUtils.round( this.getSevenDaysSaleQuantity() * 100 / this.getThirtyDaysSaleQuantity() * 0.01, 2); } } public void setSevenDaysSaleTrend(Double sevenDaysSaleTrend) { this.sevenDaysSaleTrend = 
sevenDaysSaleTrend; } public Integer getYesterdaySaleQuantity() { if (yesterdaySaleQuantity == null) { yesterdaySaleQuantity = 0; } return yesterdaySaleQuantity; } public void setYesterdaySaleQuantity(Integer yesterdaySaleQuantity) { this.yesterdaySaleQuantity = yesterdaySaleQuantity; } public Integer getThreeDaysSaleQuantity() { if (threeDaysSaleQuantity == null) { threeDaysSaleQuantity = 0; } return threeDaysSaleQuantity; } public void setThreeDaysSaleQuantity(Integer threeDaysSaleQuantity) { this.threeDaysSaleQuantity = threeDaysSaleQuantity; } public Integer getSevenDaysSaleQuantity() { if (sevenDaysSaleQuantity == null) { sevenDaysSaleQuantity = 0; } return sevenDaysSaleQuantity; } public void setSevenDaysSaleQuantity(Integer sevenDaysSaleQuantity) { this.sevenDaysSaleQuantity = sevenDaysSaleQuantity; } public Integer getThirtyDaysSaleQuantity() { if (thirtyDaysSaleQuantity == null) { thirtyDaysSaleQuantity = 0; } return thirtyDaysSaleQuantity; } public void setThirtyDaysSaleQuantity(Integer thirtyDaysSaleQuantity) { this.thirtyDaysSaleQuantity = thirtyDaysSaleQuantity; } public Date getEnrollDate() { if (enrollDate == null) { enrollDate = new Date(); } return enrollDate; } public void setEnrollDate(Date enrollDate) { this.enrollDate = enrollDate; } public Boolean getIsLocked() { if (isLocked == null) { isLocked = false; } return isLocked; } public void setIsLocked(Boolean isLocked) { this.isLocked = isLocked; } public Boolean getIsAutoAdjust() { if (isAutoAdjust == null) { isAutoAdjust = false; } return isAutoAdjust; } public void setIsAutoAdjust(Boolean isAutoAdjust) { this.isAutoAdjust = isAutoAdjust; } }
package com.marshalchen.common.uimodule.square_progressbar; import android.content.Context; import android.graphics.Color; import android.graphics.ColorMatrix; import android.graphics.ColorMatrixColorFilter; import android.util.AttributeSet; import android.view.LayoutInflater; import android.widget.ImageView; import android.widget.ImageView.ScaleType; import android.widget.RelativeLayout; import com.marshalchen.common.uimodule.widgets.R; import com.marshalchen.common.uimodule.square_progressbar.utils.CalculationUtil; import com.marshalchen.common.uimodule.square_progressbar.utils.PercentStyle; /** * The basic {@link SquareProgressBar}. THis class includes all the methods you * need to modify your {@link SquareProgressBar}. * * @author ysigner * @since 1.0.0 */ public class SquareProgressBar extends RelativeLayout { private ImageView imageView; private final SquareProgressView bar; private boolean opacity = false; private boolean greyscale; private boolean isFadingOnProgress = false; /** * New SquareProgressBar. * * @param context * the {@link android.content.Context} * @param attrs * an {@link android.util.AttributeSet} * @param defStyle * a defined style. * @since 1.0.0 */ public SquareProgressBar(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); LayoutInflater mInflater = (LayoutInflater) context .getSystemService(Context.LAYOUT_INFLATER_SERVICE); mInflater.inflate(R.layout.square_progress_bar_view, this, true); bar = (SquareProgressView) findViewById(R.id.squareProgressBar1); bar.bringToFront(); } /** * New SquareProgressBar. 
* * @param context * the {@link android.content.Context} * @param attrs * an {@link android.util.AttributeSet} * @since 1.0.0 */ public SquareProgressBar(Context context, AttributeSet attrs) { super(context, attrs); LayoutInflater mInflater = (LayoutInflater) context .getSystemService(Context.LAYOUT_INFLATER_SERVICE); mInflater.inflate(R.layout.square_progress_bar_view, this, true); bar = (SquareProgressView) findViewById(R.id.squareProgressBar1); bar.bringToFront(); } /** * New SquareProgressBar. * * @param context * @since 1.0.0 */ public SquareProgressBar(Context context) { super(context); LayoutInflater mInflater = (LayoutInflater) context .getSystemService(Context.LAYOUT_INFLATER_SERVICE); mInflater.inflate(R.layout.square_progress_bar_view, this, true); bar = (SquareProgressView) findViewById(R.id.squareProgressBar1); bar.bringToFront(); } /** * Sets the image of the {@link SquareProgressBar}. Must be a valid * ressourceId. * * @param image * the image as a ressourceId * @since 1.0 */ public void setImage(int image) { imageView = (ImageView) findViewById(R.id.imageView1); imageView.setImageResource(image); } /** * Sets the image scale type according to {@link android.widget.ImageView.ScaleType}. * * @param scale * the image ScaleType * @since 1.3.0 * @author thiagokimo */ public void setImageScaleType(ScaleType scale) { imageView.setScaleType(scale); } /** * Sets the progress of the {@link SquareProgressBar}. If opacity is * selected then here it sets it. See {@link #setOpacity(boolean)} for more * information. * * @param progress * the progress * @since 1.0.0 */ public void setProgress(double progress) { bar.setProgress(progress); if (opacity) { if (isFadingOnProgress) { setOpacity(100 - (int) progress); } else { setOpacity((int) progress); } } else { setOpacity(100); } } /** * Sets the colour of the {@link SquareProgressBar} to a predefined android * holo color. 
<br/> * <b>Examples:</b> * <ul> * <li>holo_blue_bright</li> * <li>holo_blue_dark</li> * <li>holo_blue_light</li> * <li>holo_green_dark</li> * <li>holo_green_light</li> * <li>holo_orange_dark</li> * <li>holo_orange_light</li> * <li>holo_purple</li> * <li>holo_red_dark</li> * <li>holo_red_light</li> * </ul> * * @param androidHoloColor * @since 1.0.0 */ public void setHoloColor(int androidHoloColor) { bar.setColor(getContext().getResources().getColor(androidHoloColor)); } /** * Sets the colour of the {@link SquareProgressBar}. YOu can give it a * hex-color string like <i>#C9C9C9</i>. * * @param colorString * the colour of the {@link SquareProgressBar} * @since 1.1.0 */ public void setColor(String colorString) { bar.setColor(Color.parseColor(colorString)); } /** * This sets the colour of the {@link SquareProgressBar} with a RGB colour. * * @param r * red * @param g * green * @param b * blue * @since 1.1.0 */ public void setColorRGB(int r, int g, int b) { bar.setColor(Color.rgb(r, g, b)); } /** * This sets the colour of the {@link SquareProgressBar} with a RGB colour. * Works when used with * <code>android.graphics.Color.rgb(int, int, int)</code> * * @param * * @param * * @param * * @since 1.4.0 */ public void setColorRGB(int rgb) { bar.setColor(rgb); } /** * This sets the width of the {@link SquareProgressBar}. * * @param width * in Dp * @since 1.1.0 */ public void setWidth(int width) { int padding = CalculationUtil.convertDpToPx(width, getContext()); imageView.setPadding(padding, padding, padding, padding); bar.setWidthInDp(width); } /** * This sets the alpha of the image in the view. Actually I need to use the * deprecated method here as the new one is only available for the API-level * 16. And the min API level of this library is 14. * * Use this only as private method. * * @param progress * the progress */ private void setOpacity(int progress) { imageView.setAlpha((int) (2.55 * progress)); } /** * Switches the opacity state of the image. 
This forces the * SquareProgressBar to redraw with the current progress. As bigger the * progress is, then more of the image comes to view. If the progress is 0, * then you can't see the image at all. If the progress is 100, the image is * shown full. * * @param opacity * true if opacity should be enabled. * @since 1.2.0 */ public void setOpacity(boolean opacity) { this.opacity = opacity; setProgress(bar.getProgress()); } /** * Switches the opacity state of the image. This forces the * SquareProgressBar to redraw with the current progress. As bigger the * progress is, then more of the image comes to view. If the progress is 0, * then you can't see the image at all. If the progress is 100, the image is * shown full. * * You can also set the flag if the fading should get inverted so the image * disappears when the progress increases. * * @param opacity * true if opacity should be enabled. * @param isFadingOnProgress * default false. This changes the behavior the opacity works. If * the progress increases then the images fades. When the * progress reaches 100, then the image disappears. * @since 1.4.0 */ public void setOpacity(boolean opacity, boolean isFadingOnProgress) { this.opacity = opacity; this.isFadingOnProgress = isFadingOnProgress; setProgress(bar.getProgress()); } /** * You can set the image to b/w with this method. Works fine with the * opacity. * * @param greyscale * true if the grayscale should be activated. * @since 1.2.0 */ public void setImageGrayscale(boolean greyscale) { this.greyscale = greyscale; if (greyscale) { ColorMatrix matrix = new ColorMatrix(); matrix.setSaturation(0); imageView.setColorFilter(new ColorMatrixColorFilter(matrix)); } else { imageView.setColorFilter(null); } } /** * If opacity is enabled. * * @return true if opacity is enabled. */ public boolean isOpacity() { return opacity; } /** * If greyscale is enabled. * * @return true if greyscale is enabled. 
*/ public boolean isGreyscale() { return greyscale; } /** * Draws an outline of the progressbar. Looks quite cool in some situations. * * @param drawOutline * true if it should or not. * @since 1.3.0 */ public void drawOutline(boolean drawOutline) { bar.setOutline(drawOutline); } /** * If outline is enabled or not. * * @return true if outline is enabled. */ public boolean isOutline() { return bar.isOutline(); } /** * Draws the startline. this is the line where the progressbar starts the * drawing around the image. * * @param drawStartline * true if it should or not. * @since 1.3.0 */ public void drawStartline(boolean drawStartline) { bar.setStartline(drawStartline); } /** * If the startline is enabled. * * @return true if startline is enabled or not. */ public boolean isStartline() { return bar.isStartline(); } /** * Defines if the percent text should be shown or not. To modify the text * checkout {@link #setPercentStyle(com.marshalchen.common.uimodule.square_progressbar.utils.PercentStyle)}. * * @param showProgress * true if it should or not. * @since 1.3.0 */ public void showProgress(boolean showProgress) { bar.setShowProgress(showProgress); } /** * If the progress text inside of the image is enabled. * * @return true if it is or not. */ public boolean isShowProgress() { return bar.isShowProgress(); } /** * Sets a custom percent style to the text inside the image. Make sure you * set {@link #showProgress(boolean)} to true. Otherwise it doesn't shows. * The default settings are:</br> * <table> * <tr> * <th>Text align</td> * <td>CENTER</td> * </tr> * <tr> * <th>Text size</td> * <td>150 [dp]</td> * </tr> * <tr> * <th>Display percentsign</td> * <td>true</td> * </tr> * <tr> * <th>Custom text</td> * <td>%</td> * </tr> * </table> * * @param percentStyle */ public void setPercentStyle(PercentStyle percentStyle) { bar.setPercentStyle(percentStyle); } /** * Returns the {@link com.marshalchen.common.uimodule.square_progressbar.utils.PercentStyle} of the percent text. 
Maybe returns the * default value, check {@link #setPercentStyle(com.marshalchen.common.uimodule.square_progressbar.utils.PercentStyle)} fo that. * * @return the percent style of the moment. */ public PercentStyle getPercentStyle() { return bar.getPercentStyle(); } /** * If the progress hits 100% then the progressbar disappears if this flag is * set to <code>true</code>. The default is set to false. * * @param clearOnHundred * if it should disappear or not. * @since 1.4.0 */ public void setClearOnHundred(boolean clearOnHundred) { bar.setClearOnHundred(clearOnHundred); } /** * If the progressbar disappears when the progress reaches 100%. * * @since 1.4.0 */ public boolean isClearOnHundred() { return bar.isClearOnHundred(); } }
/*
 * Copyright 2003-2011 Dave Griffith, Bas Leijdekkers
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.siyeh.ig.psiutils;

import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.TypeConversionUtil;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.HashSet;
import java.util.Set;

/**
 * Utility for computing the type that an expression's surrounding context
 * expects it to have (e.g. the declared type of the variable it initializes,
 * the parameter type of the call it is an argument of, or the promoted
 * numeric type demanded by an enclosing binary expression).
 */
public class ExpectedTypeUtils{

    // Utility class; not instantiable.
    private ExpectedTypeUtils() {}

    /**
     * Determines the type expected for {@code expression} by its parent
     * context, skipping over any enclosing parentheses first.
     *
     * @param expression the expression whose expected type is wanted
     * @param calculateTypeForComplexReferences when true, also computes an
     *        expected qualifier type for field/method reference expressions
     *        (a more expensive resolution-based calculation)
     * @return the expected type, or null if none can be determined
     */
    @Nullable
    public static PsiType findExpectedType(
            @NotNull PsiExpression expression,
            boolean calculateTypeForComplexReferences){
        PsiElement context = expression.getParent();
        PsiExpression wrappedExpression = expression;
        // Walk out of parentheses so the visitor sees the real context.
        while(context != null &&
                context instanceof PsiParenthesizedExpression){
            wrappedExpression = (PsiExpression) context;
            context = context.getParent();
        }
        if(context == null){
            return null;
        }
        final ExpectedTypeVisitor visitor =
                new ExpectedTypeVisitor(wrappedExpression,
                        calculateTypeForComplexReferences);
        context.accept(visitor);
        return visitor.getExpectedType();
    }

    /**
     * Visitor invoked on the PARENT of the wrapped expression; each visit
     * method derives the expected type from that particular kind of context.
     */
    private static class ExpectedTypeVisitor extends JavaElementVisitor{

        /**
         * @noinspection StaticCollection
         */
        private static final Set<IElementType> arithmeticOps =
                new HashSet<IElementType>(5);
        private static final Set<IElementType> comparisonOps =
                new HashSet<IElementType>(6);
        private static final Set<IElementType> booleanOps =
                new HashSet<IElementType>(5);
        private static final Set<IElementType> shiftOps =
                new HashSet<IElementType>(3);
        private static final Set<IElementType> operatorAssignmentOps =
                new HashSet<IElementType>(11);

        static {
            arithmeticOps.add(JavaTokenType.PLUS);
            arithmeticOps.add(JavaTokenType.MINUS);
            arithmeticOps.add(JavaTokenType.ASTERISK);
            arithmeticOps.add(JavaTokenType.DIV);
            arithmeticOps.add(JavaTokenType.PERC);

            comparisonOps.add(JavaTokenType.EQEQ);
            comparisonOps.add(JavaTokenType.NE);
            comparisonOps.add(JavaTokenType.LE);
            comparisonOps.add(JavaTokenType.LT);
            comparisonOps.add(JavaTokenType.GE);
            comparisonOps.add(JavaTokenType.GT);

            booleanOps.add(JavaTokenType.ANDAND);
            booleanOps.add(JavaTokenType.AND);
            booleanOps.add(JavaTokenType.XOR);
            booleanOps.add(JavaTokenType.OROR);
            booleanOps.add(JavaTokenType.OR);

            shiftOps.add(JavaTokenType.LTLT);
            shiftOps.add(JavaTokenType.GTGT);
            shiftOps.add(JavaTokenType.GTGTGT);

            operatorAssignmentOps.add(JavaTokenType.PLUSEQ);
            operatorAssignmentOps.add(JavaTokenType.MINUSEQ);
            operatorAssignmentOps.add(JavaTokenType.ASTERISKEQ);
            operatorAssignmentOps.add(JavaTokenType.DIVEQ);
            operatorAssignmentOps.add(JavaTokenType.ANDEQ);
            operatorAssignmentOps.add(JavaTokenType.OREQ);
            operatorAssignmentOps.add(JavaTokenType.XOREQ);
            operatorAssignmentOps.add(JavaTokenType.PERCEQ);
            operatorAssignmentOps.add(JavaTokenType.LTLTEQ);
            operatorAssignmentOps.add(JavaTokenType.GTGTEQ);
            operatorAssignmentOps.add(JavaTokenType.GTGTGTEQ);
        }

        // The expression (possibly a parenthesized wrapper) whose expected
        // type is being computed; visit methods compare children against it.
        private final PsiExpression wrappedExpression;
        private final boolean calculateTypeForComplexReferences;
        // Result; stays null when no expected type can be determined.
        private PsiType expectedType = null;

        ExpectedTypeVisitor(PsiExpression wrappedExpression,
                            boolean calculateTypeForComplexReferences){
            this.wrappedExpression = wrappedExpression;
            this.calculateTypeForComplexReferences =
                    calculateTypeForComplexReferences;
        }

        public PsiType getExpectedType(){
            return expectedType;
        }

        @Override
        public void visitField(@NotNull PsiField field){
            // A field initializer is expected to have the field's type.
            final PsiExpression initializer = field.getInitializer();
            if(wrappedExpression.equals(initializer)){
                expectedType = field.getType();
            }
        }

        @Override
        public void visitVariable(@NotNull PsiVariable variable){
            expectedType = variable.getType();
        }

        @Override
        public void visitArrayInitializerExpression(
                PsiArrayInitializerExpression initializer){
            // Elements of an array initializer take the component type.
            final PsiType type = initializer.getType();
            if(!(type instanceof PsiArrayType)){
                expectedType = null;
                return;
            }
            final PsiArrayType arrayType = (PsiArrayType) type;
            expectedType = arrayType.getComponentType();
        }

        @Override
        public void visitArrayAccessExpression(
                PsiArrayAccessExpression accessExpression){
            // Array index expressions are always expected to be int.
            final PsiExpression indexExpression =
                    accessExpression.getIndexExpression();
            if(wrappedExpression.equals(indexExpression)){
                expectedType = PsiType.INT;
            }
        }

        @Override
        public void visitBinaryExpression(
                @NotNull PsiBinaryExpression binaryExpression) {
            final PsiJavaToken sign =
                    binaryExpression.getOperationSign();
            final IElementType tokenType = sign.getTokenType();
            final PsiType type = binaryExpression.getType();
            final PsiExpression rhs = binaryExpression.getROperand();
            if (rhs == null) {
                expectedType = null;
                return;
            }
            final PsiExpression lhs = binaryExpression.getLOperand();
            PsiType lhsType = lhs.getType();
            if (lhsType == null) {
                expectedType = null;
                return;
            }
            PsiType rhsType = rhs.getType();
            if (rhsType == null) {
                expectedType = null;
                return;
            }
            if (TypeUtils.isJavaLangString(type) ||
                    isArithmeticOperation(tokenType) ||
                    isBooleanOperation(tokenType)) {
                // String concatenation, arithmetic and boolean operations
                // expect each operand to match the whole expression's type.
                expectedType = type;
            } else if (isShiftOperation(tokenType)) {
                // Shift operands are promoted independently (JLS 5.6.1).
                if (wrappedExpression.equals(lhs)) {
                    expectedType = unaryNumericPromotion(lhsType);
                } else {
                    expectedType = unaryNumericPromotion(rhsType);
                }
            } else if (ComparisonUtils.isComparisonOperation(tokenType)) {
                // Unbox both sides first; comparison of non-unboxable
                // references yields no expected type.
                if (!ClassUtils.isPrimitive(lhsType)) {
                    lhsType = PsiPrimitiveType.getUnboxedType(lhsType);
                    if (lhsType == null) {
                        expectedType = null;
                        return;
                    }
                }
                if (!ClassUtils.isPrimitive(rhsType)) {
                    rhsType = PsiPrimitiveType.getUnboxedType(rhsType);
                    if (rhsType == null) {
                        expectedType = null;
                        return;
                    }
                }
                // JLS 5.6.2 Binary Numeric Promotion
                if (PsiType.DOUBLE.equals(lhsType) ||
                        PsiType.DOUBLE.equals(rhsType)) {
                    expectedType = PsiType.DOUBLE;
                } else if (PsiType.FLOAT.equals(lhsType) ||
                        PsiType.FLOAT.equals(rhsType)) {
                    expectedType = PsiType.FLOAT;
                } else if (PsiType.LONG.equals(lhsType) ||
                        PsiType.LONG.equals(rhsType)) {
                    expectedType = PsiType.LONG;
                } else {
                    expectedType = PsiType.INT;
                }
            } else {
                expectedType = null;
            }
        }

        /**
         * JLS 5.6.1 Unary Numeric Promotion
         */
        private static PsiType unaryNumericPromotion(PsiType type) {
            if (type == null) {
                return null;
            }
            if (type.equalsToText("java.lang.Byte") ||
                    type.equalsToText("java.lang.Short") ||
                    type.equalsToText("java.lang.Character") ||
                    type.equalsToText("java.lang.Integer") ||
                    type.equals(PsiType.BYTE) ||
                    type.equals(PsiType.SHORT) ||
                    type.equals(PsiType.CHAR)) {
                return PsiType.INT;
            } else if (type.equalsToText("java.lang.Long")) {
                return PsiType.LONG;
            } else if (type.equalsToText("java.lang.Float")) {
                return PsiType.FLOAT;
            } else if (type.equalsToText("java.lang.Double")) {
                return PsiType.DOUBLE;
            }
            return type;
        }

        @Override
        public void visitPrefixExpression(
                @NotNull PsiPrefixExpression expression){
            // Operand of a prefix operator is expected unboxed.
            final PsiType type = expression.getType();
            if (type instanceof PsiPrimitiveType) {
                expectedType = type;
            } else {
                expectedType = PsiPrimitiveType.getUnboxedType(type);
            }
        }

        @Override
        public void visitPostfixExpression(
                @NotNull PsiPostfixExpression expression){
            // Operand of a postfix operator is expected unboxed.
            final PsiType type = expression.getType();
            if (type instanceof PsiPrimitiveType) {
                expectedType = type;
            } else {
                expectedType = PsiPrimitiveType.getUnboxedType(type);
            }
        }

        @Override
        public void visitWhileStatement(
                @NotNull PsiWhileStatement whileStatement){
            // Loop/branch conditions are always boolean.
            expectedType = PsiType.BOOLEAN;
        }

        @Override
        public void visitForStatement(
                @NotNull PsiForStatement statement){
            expectedType = PsiType.BOOLEAN;
        }

        @Override
        public void visitIfStatement(
                @NotNull PsiIfStatement statement){
            expectedType = PsiType.BOOLEAN;
        }

        @Override
        public void visitDoWhileStatement(
                @NotNull PsiDoWhileStatement statement){
            expectedType = PsiType.BOOLEAN;
        }

        @Override
        public void visitSynchronizedStatement(
                @NotNull PsiSynchronizedStatement statement){
            // The monitor expression of synchronized(...) may be any Object.
            final PsiManager manager = statement.getManager();
            final Project project = manager.getProject();
            final GlobalSearchScope scope =
                    GlobalSearchScope.allScope(project);
            expectedType = PsiType.getJavaLangObject(manager, scope);
        }

        @Override
        public void visitAssignmentExpression(
                @NotNull PsiAssignmentExpression assignment){
            final PsiExpression rExpression = assignment.getRExpression();
            final PsiJavaToken operationSign =
                    assignment.getOperationSign();
            final IElementType tokenType = operationSign.getTokenType();
            final PsiExpression lExpression = assignment.getLExpression();
            final PsiType lType = lExpression.getType();
            if(rExpression != null &&
                    wrappedExpression.equals(rExpression)){
                // The wrapped expression is the right-hand side.
                if(lType == null){
                    expectedType = null;
                } else if(TypeUtils.isJavaLangString(lType)){
                    if(JavaTokenType.PLUSEQ.equals(tokenType)){
                        // e.g. String += any type
                        expectedType = rExpression.getType();
                    } else{
                        expectedType = lType;
                    }
                } else if (isOperatorAssignmentOperation(tokenType)){
                    // Compound assignment unboxes a boxed left-hand type.
                    if (lType instanceof PsiPrimitiveType){
                        expectedType = lType;
                    } else{
                        expectedType =
                                PsiPrimitiveType.getUnboxedType(lType);
                    }
                } else{
                    expectedType = lType;
                }
            } else{
                // The wrapped expression is the left-hand side.
                if (isOperatorAssignmentOperation(tokenType) &&
                        !(lType instanceof PsiPrimitiveType)){
                    expectedType = PsiPrimitiveType.getUnboxedType(lType);
                } else{
                    expectedType = lType;
                }
            }
        }

        @Override
        public void visitConditionalExpression(
                PsiConditionalExpression conditional){
            final PsiExpression condition = conditional.getCondition();
            if(condition.equals(wrappedExpression)){
                // The ?: condition must be boolean ...
                expectedType = PsiType.BOOLEAN;
            } else{
                // ... while both branches take the overall type.
                expectedType = conditional.getType();
            }
        }

        @Override
        public void visitReturnStatement(
                @NotNull PsiReturnStatement returnStatement){
            // A returned expression takes the enclosing method's return type.
            final PsiMethod method =
                    PsiTreeUtil.getParentOfType(returnStatement,
                            PsiMethod.class);
            if(method == null){
                expectedType = null;
            } else{
                expectedType = method.getReturnType();
            }
        }

        @Override
        public void visitDeclarationStatement(
                PsiDeclarationStatement declaration){
            // Find which declared variable the wrapped expression initializes.
            final PsiElement[] declaredElements =
                    declaration.getDeclaredElements();
            for(PsiElement declaredElement : declaredElements){
                if(declaredElement instanceof PsiVariable){
                    final PsiVariable variable =
                            (PsiVariable) declaredElement;
                    final PsiExpression initializer =
                            variable.getInitializer();
                    if(wrappedExpression.equals(initializer)){
                        expectedType = variable.getType();
                        return;
                    }
                }
            }
        }

        @Override
        public void visitExpressionList(PsiExpressionList expressionList){
            // A call argument takes the matching parameter's type.
            final JavaResolveResult result =
                    findCalledMethod(expressionList);
            final PsiMethod method = (PsiMethod) result.getElement();
            if(method == null){
                expectedType = null;
            } else{
                final int parameterPosition =
                        getParameterPosition(expressionList,
                                wrappedExpression);
                expectedType =
                        getTypeOfParameter(result, parameterPosition);
            }
        }

        /**
         * Resolves the method invoked by the call owning this argument list,
         * handling both direct calls and anonymous-class constructor calls.
         */
        @NotNull
        private static JavaResolveResult findCalledMethod(
                PsiExpressionList expressionList){
            final PsiElement parent = expressionList.getParent();
            if(parent instanceof PsiCallExpression){
                final PsiCallExpression call = (PsiCallExpression) parent;
                return call.resolveMethodGenerics();
            } else if (parent instanceof PsiAnonymousClass) {
                final PsiElement grandParent = parent.getParent();
                if (grandParent instanceof PsiCallExpression){
                    final PsiCallExpression callExpression =
                            (PsiCallExpression)grandParent;
                    return callExpression.resolveMethodGenerics();
                }
            }
            return JavaResolveResult.EMPTY;
        }

        @Override
        public void visitReferenceExpression(
                @NotNull PsiReferenceExpression referenceExpression){
            //Dave, do we need this at all? -> I think we do -- Bas
            if(calculateTypeForComplexReferences){
                // For a qualifier of a field access or method call, the
                // expected type is the (substituted) declaring class of the
                // accessed member — or the deepest visible super class that
                // declares a compatible method.
                final Project project = referenceExpression.getProject();
                final JavaResolveResult resolveResult =
                        referenceExpression.advancedResolve(false);
                final PsiElement element = resolveResult.getElement();
                PsiSubstitutor substitutor =
                        resolveResult.getSubstitutor();
                final JavaPsiFacade psiFacade =
                        JavaPsiFacade.getInstance(project);
                if(element instanceof PsiField){
                    final PsiField field = (PsiField) element;
                    if (!isAccessibleFrom(field, referenceExpression)) {
                        return;
                    }
                    final PsiClass aClass = field.getContainingClass();
                    if (aClass == null) {
                        return;
                    }
                    final PsiElementFactory factory =
                            psiFacade.getElementFactory();
                    expectedType = factory.createType(aClass, substitutor);
                } else if(element instanceof PsiMethod){
                    // The super method is only usable when its return type is
                    // assignable to what the call site expects.
                    final PsiElement parent =
                            referenceExpression.getParent();
                    final PsiType returnType;
                    if (parent instanceof PsiMethodCallExpression) {
                        final PsiMethodCallExpression methodCallExpression =
                                (PsiMethodCallExpression) parent;
                        final PsiType type =
                                methodCallExpression.getType();
                        if (!PsiType.VOID.equals(type)) {
                            returnType =
                                    findExpectedType(methodCallExpression,
                                            true);
                        } else {
                            returnType = null;
                        }
                    } else {
                        returnType = null;
                    }
                    final PsiMethod method = (PsiMethod) element;
                    final PsiMethod superMethod =
                            findDeepestVisibleSuperMethod(method,
                                    returnType, referenceExpression);
                    final PsiClass aClass;
                    if(superMethod != null){
                        aClass = superMethod.getContainingClass();
                        if (aClass == null) {
                            return;
                        }
                        // Re-map type parameters onto the super class.
                        substitutor =
                                TypeConversionUtil.getSuperClassSubstitutor(
                                        aClass,
                                        method.getContainingClass(),
                                        substitutor);
                    } else{
                        aClass = method.getContainingClass();
                        if (aClass == null) {
                            return;
                        }
                    }
                    final PsiElementFactory factory =
                            psiFacade.getElementFactory();
                    expectedType = factory.createType(aClass, substitutor);
                } else{
                    expectedType = null;
                }
            }
        }

        /**
         * Finds the most-super class method overridden by {@code method} that
         * is visible from {@code element} and (when {@code returnType} is
         * given) has a return type assignable to it. Returns null for
         * constructors, static or private methods, or when nothing qualifies.
         */
        @Nullable
        private static PsiMethod findDeepestVisibleSuperMethod(
                PsiMethod method, PsiType returnType, PsiElement element){
            if(method.isConstructor()){
                return null;
            }
            if(method.hasModifierProperty(PsiModifier.STATIC)){
                return null;
            }
            if(method.hasModifierProperty(PsiModifier.PRIVATE)){
                return null;
            }
            final PsiClass aClass = method.getContainingClass();
            if(aClass == null){
                return null;
            }
            final PsiMethod[] superMethods =
                    aClass.findMethodsBySignature(method, true);
            PsiMethod topSuper = null;
            PsiClass topSuperContainingClass = null;
            for(PsiMethod superMethod : superMethods){
                final PsiClass superClass =
                        superMethod.getContainingClass();
                if (superClass == null) {
                    continue;
                }
                if(aClass.equals(superClass)){
                    continue;
                }
                if(!isAccessibleFrom(superMethod, element)){
                    continue;
                }
                if (returnType != null) {
                    final PsiType superReturnType =
                            superMethod.getReturnType();
                    if (superReturnType == null) {
                        continue;
                    }
                    if (!returnType.isAssignableFrom(superReturnType)) {
                        continue;
                    }
                }
                // Keep only the highest class in the hierarchy.
                if(topSuper != null &&
                        superClass.isInheritor(topSuperContainingClass,
                                true)){
                    continue;
                }
                topSuper = superMethod;
                topSuperContainingClass = superClass;
            }
            return topSuper;
        }

        /**
         * Approximates Java access rules: public members are always visible;
         * private only within the same class; otherwise same-package access.
         * NOTE(review): protected access across packages via inheritance is
         * not handled here — presumably deliberate as a conservative check.
         */
        private static boolean isAccessibleFrom(PsiMember member,
                                                PsiElement referencingLocation){
            if(member.hasModifierProperty(PsiModifier.PUBLIC)){
                return true;
            }
            final PsiClass containingClass = member.getContainingClass();
            if (containingClass == null) {
                return false;
            }
            final PsiClass referencingClass =
                    ClassUtils.getContainingClass(referencingLocation);
            if (referencingClass == null){
                return false;
            }
            if(referencingClass.equals(containingClass)){
                return true;
            }
            if(member.hasModifierProperty(PsiModifier.PRIVATE)){
                return false;
            }
            return ClassUtils.inSamePackage(containingClass,
                    referencingLocation);
        }

        private static boolean isArithmeticOperation(
                @NotNull IElementType sign){
            return arithmeticOps.contains(sign);
        }

        private static boolean isBooleanOperation(
                @NotNull IElementType sign){
            return booleanOps.contains(sign);
        }

        private static boolean isShiftOperation(
                @NotNull IElementType sign){
            return shiftOps.contains(sign);
        }

        private static boolean isOperatorAssignmentOperation(
                @NotNull IElementType sign){
            return operatorAssignmentOps.contains(sign);
        }

        /**
         * Returns the index of {@code expression} within the argument list,
         * or -1 when it is not a direct argument.
         */
        private static int getParameterPosition(
                @NotNull PsiExpressionList expressionList,
                PsiExpression expression) {
            final PsiExpression[] expressions =
                    expressionList.getExpressions();
            for(int i = 0; i < expressions.length; i++){
                if(expressions[i].equals(expression)){
                    return i;
                }
            }
            return -1;
        }

        /**
         * Returns the substituted declared type of the parameter at
         * {@code parameterPosition}, unwrapping varargs to the component type
         * and replacing {@code ? extends FinalClass} wildcards with the final
         * class itself (a wildcard of a final class has only one instantiation).
         */
        @Nullable
        private static PsiType getTypeOfParameter(
                @NotNull JavaResolveResult result,
                int parameterPosition) {
            final PsiMethod method = (PsiMethod) result.getElement();
            if (method == null){
                return null;
            }
            final PsiSubstitutor substitutor = result.getSubstitutor();
            final PsiParameterList parameterList =
                    method.getParameterList();
            if(parameterPosition < 0){
                return null;
            }
            final int parametersCount =
                    parameterList.getParametersCount();
            final PsiParameter[] parameters;
            if (parameterPosition >= parametersCount) {
                // Argument beyond the declared list: only valid when the last
                // parameter is a vararg; expected type is its component type.
                final int lastParameterPosition = parametersCount - 1;
                if (lastParameterPosition < 0) {
                    return null;
                }
                parameters = parameterList.getParameters();
                final PsiParameter lastParameter =
                        parameters[lastParameterPosition];
                if (lastParameter.isVarArgs()) {
                    final PsiArrayType arrayType =
                            (PsiArrayType) lastParameter.getType();
                    return substitutor.substitute(
                            arrayType.getComponentType());
                }
                return null;
            }
            parameters = parameterList.getParameters();
            final PsiParameter parameter =
                    parameters[parameterPosition];
            final PsiType parameterType = parameter.getType();
            if(parameter.isVarArgs()){
                final PsiArrayType arrayType =
                        (PsiArrayType)parameterType;
                return substitutor.substitute(
                        arrayType.getComponentType());
            }
            final PsiType type =
                    substitutor.substitute(parameterType);
            final TypeStringCreator typeStringCreator =
                    new TypeStringCreator();
            type.accept(typeStringCreator);
            if (typeStringCreator.isModified()) {
                // Wildcards on final classes were replaced; rebuild the type
                // from the generated canonical text.
                final PsiManager manager = method.getManager();
                final Project project = manager.getProject();
                final PsiElementFactory factory =
                        JavaPsiFacade.getInstance(project)
                                .getElementFactory();
                try {
                    final String typeString =
                            typeStringCreator.getTypeString();
                    return factory.createTypeFromText(typeString, method);
                } catch (IncorrectOperationException e) {
                    throw new AssertionError(
                            "incorrect type string generated from " +
                                    type + ": " + e.getMessage());
                }
            }
            return type;
        }

        /**
         * Creates a new type string without any wildcards with final
         * extends bounds from the visited type.
         */
        private static class TypeStringCreator
                extends PsiTypeVisitor<Object> {

            private final StringBuilder typeString = new StringBuilder();
            // True once any wildcard has been replaced by its final bound.
            private boolean modified = false;

            @Override
            public Object visitType(PsiType type) {
                typeString.append(type.getCanonicalText());
                return super.visitType(type);
            }

            @Override
            public Object visitWildcardType(PsiWildcardType wildcardType) {
                if (wildcardType.isExtends()) {
                    final PsiType extendsBound =
                            wildcardType.getExtendsBound();
                    if (extendsBound instanceof PsiClassType) {
                        final PsiClassType classType =
                                (PsiClassType) extendsBound;
                        final PsiClass aClass = classType.resolve();
                        if (aClass != null &&
                                aClass.hasModifierProperty(
                                        PsiModifier.FINAL)) {
                            // "? extends FinalClass" == "FinalClass".
                            modified = true;
                            return super.visitClassType(classType);
                        }
                    }
                }
                return super.visitWildcardType(wildcardType);
            }

            @Override
            public Object visitClassType(PsiClassType classType) {
                final PsiClassType rawType = classType.rawType();
                typeString.append(rawType.getCanonicalText());
                final PsiType[] parameterTypes =
                        classType.getParameters();
                if (parameterTypes.length > 0) {
                    typeString.append('<');
                    final PsiType parameterType1 = parameterTypes[0];
                    // IDEADEV-25549 says this can be null
                    if (parameterType1 != null) {
                        parameterType1.accept(this);
                    }
                    for (int i = 1; i < parameterTypes.length; i++) {
                        typeString.append(',');
                        final PsiType parameterType = parameterTypes[i];
                        // IDEADEV-25549 again
                        if (parameterType != null) {
                            parameterType.accept(this);
                        }
                    }
                    typeString.append('>');
                }
                return null;
            }

            public String getTypeString() {
                return typeString.toString();
            }

            public boolean isModified() {
                return modified;
            }
        }
    }
}
package org.orangepalantir.genericpca;

import lightgraph.DataSet;
import lightgraph.Graph;
import lightgraph.GraphPoints;
import org.orangepalantir.genericpca.display.TwoDHeatMap;

import javax.imageio.ImageIO;
import javax.swing.JFrame;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.*;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

/**
 * K-means clustering of per-shape coefficient vectors in N selected dimensions.
 *
 * <p>Input is one {@code List<IndexedCoefficient>} per shape. {@link #setInput}
 * sorts each shape by coefficient index and keeps both the original values and
 * a normalized copy; clustering ({@link #calculate}, {@link #plot}) runs on the
 * normalized values over the coefficient indexes supplied by the caller.
 *
 * <p>Cluster seeds are random ({@link #getInitialMean}), so repeated runs on
 * the same data may produce different partitions.
 *
 * Created by msmith on 04.10.17.
 */
public class CoefficientKmeansND {
    // Shapes as loaded, each sorted by coefficient index.
    List<List<IndexedCoefficient>> original;
    // Per-index normalized copy of 'original' (see normalizeCoefficients).
    List<List<IndexedCoefficient>> normalized;
    // Number of clusters.
    int ks = 4;
    // Maximum k-means update iterations.
    int levels = 1000;
    // Optional per-shape source paths, parallel to 'original'.
    List<Path> labels;
    // Optional eigenvectors used by writeMeanShapes to reconstruct mean images.
    List<double[]> eigenVectors;
    // Optional highlight groups overlaid on the scatter plot.
    List<Highlight> highlights;
    // When true, getAxisExtremes returns a fixed [-3, 3] range.
    static boolean unscaled = true;

    /**
     * Stores the input shapes (each re-sorted by coefficient index) and a
     * normalized copy used for clustering.
     *
     * @param input one list of coefficients per shape; not modified.
     */
    public void setInput(List<List<IndexedCoefficient>> input) {
        List<List<IndexedCoefficient>> replacement = new ArrayList<>(input.size());
        for (List<IndexedCoefficient> shape : input) {
            List<IndexedCoefficient> sorted = new ArrayList<>(shape);
            sorted.sort((a, b) -> Integer.compare(a.i, b.i));
            replacement.add(sorted);
        }
        original = replacement;
        normalized = normalizeCoefficients(replacement);
    }

    /**
     * Euclidean distance between two equal-length vectors.
     */
    static double difference(double[] a, double[] b) {
        double sum = 0;
        for (int i = 0; i < a.length; i++) {
            sum += (a[i] - b[i]) * (a[i] - b[i]);
        }
        return Math.sqrt(sum);
    }

    /**
     * Packs the normalized coefficients at the requested indexes into a flat
     * array: shape k occupies data[n*k .. n*k + n - 1].
     *
     * @throws RuntimeException if a coefficient's stored index does not match
     *                          the requested index (shapes must be index-sorted).
     */
    private double[] packData(int[] indexes) {
        int n = indexes.length;
        double[] data = new double[n * normalized.size()];
        for (int k = 0; k < normalized.size(); k++) {
            List<IndexedCoefficient> shape = normalized.get(k);
            for (int s = 0; s < n; s++) {
                IndexedCoefficient ic = shape.get(indexes[s]);
                if (ic.i != indexes[s]) throw new RuntimeException("coefficient does not correspond to index!");
                data[n * k + s] = ic.getCoefficient();
            }
        }
        return data;
    }

    /**
     * Extracts the coefficients at the requested indexes from one shape.
     * Assumes list position equals coefficient index (guaranteed by setInput's
     * sorting for contiguous indexes) — no per-element check here, matching
     * the original partition loops.
     */
    private double[] extractVector(List<IndexedCoefficient> shape, int[] indexes) {
        double[] vector = new double[indexes.length];
        for (int i = 0; i < indexes.length; i++) {
            vector[i] = shape.get(indexes[i]).getCoefficient();
        }
        return vector;
    }

    /**
     * Index of the cluster whose mean is nearest (squared Euclidean) to vector.
     *
     * @param vector n-dimensional point.
     * @param means  flat ks*n array of cluster means.
     * @param n      dimensionality.
     */
    private int nearestCluster(double[] vector, double[] means, int n) {
        double min = Double.MAX_VALUE;  // primitive; the original boxed this as Double
        int dex = 0;
        for (int s = 0; s < ks; s++) {
            double d = 0;
            for (int i = 0; i < n; i++) {
                double v = vector[i] - means[s * n + i];
                d += v * v;
            }
            if (d < min) {
                dex = s;
                min = d;
            }
        }
        return dex;
    }

    /**
     * Runs Lloyd-style k-means: random seeds, then up to 'levels' mean updates,
     * stopping early once the means stop moving. A final update is applied
     * before returning, as in the original implementation.
     *
     * @param verbose when true, prints progress to stdout (plot's behavior).
     */
    private double[] runKmeans(int n, double[] data, boolean verbose) {
        double[] means = getInitialMean(n, data);
        if (verbose) System.out.println("starting");
        for (int k = 0; k < levels; k++) {
            double[] updated = getMeans(n, means, data);
            double delta = difference(means, updated);
            if (verbose) System.out.println(delta);
            means = updated;
            if (delta == 0) break;
        }
        return getMeans(n, means, data);
    }

    /**
     * Clusters the shapes on the given coefficient indexes and returns the
     * within-cluster variance ratio (see calculateVariation). No plotting.
     */
    public double calculate(int[] indexes) {
        int n = indexes.length;
        double[] data = packData(indexes);
        double[] means = runKmeans(n, data, false);

        List<List<double[]>> partitions = new ArrayList<>();
        for (int i = 0; i < ks; i++) {
            partitions.add(new ArrayList<>());
        }
        for (List<IndexedCoefficient> shape : normalized) {
            double[] vector = extractVector(shape, indexes);
            partitions.get(nearestCluster(vector, means, n)).add(vector);
        }
        return calculateVariation(means, partitions, n);
    }

    /**
     * Rescales each coefficient index independently to roughly [-1, 1] using
     * mean +/- one standard deviation as the range (values outside one sigma
     * land outside [-1, 1]).
     *
     * @param co index-sorted shapes; not modified.
     * @return new lists with the same indexes and scaled coefficients.
     */
    private List<List<IndexedCoefficient>> normalizeCoefficients(List<List<IndexedCoefficient>> co) {
        int cnets = co.get(0).size();
        List<List<IndexedCoefficient>> normed = new ArrayList<>(co.size());
        for (List<IndexedCoefficient> shape : co) {
            normed.add(new ArrayList<>());
        }
        for (int index = 0; index < cnets; index++) {
            double sum = 0;
            double sum2 = 0;
            for (List<IndexedCoefficient> shape : co) {
                double v = shape.get(index).getCoefficient();
                sum += v;
                sum2 += v * v;
            }
            // Range is mean +/- one standard deviation, not min/max.
            sum = sum / co.size();
            sum2 = Math.sqrt(sum2 / co.size() - sum * sum);
            double min = sum - sum2;
            double max = sum + sum2;
            for (int i = 0; i < co.size(); i++) {
                List<IndexedCoefficient> shape = co.get(i);
                List<IndexedCoefficient> nShape = normed.get(i);
                IndexedCoefficient ic = shape.get(index);
                double v = ic.getCoefficient();
                double scaled = 2 * (v - min) / (max - min) - 1;
                nShape.add(new IndexedCoefficient(ic.i, scaled));
            }
        }
        return normed;
    }

    /**
     * Clusters the shapes on the given indexes, displays a scatter plot of the
     * first two selected dimensions colored by cluster, overlays highlights,
     * and optionally writes label tables and mean-shape images.
     *
     * @return the within-cluster variance ratio for this clustering.
     * @throws IOException if writing the label file fails.
     */
    public double plot(int[] indexes) throws IOException {
        int n = indexes.length;
        double[] data = packData(indexes);
        double[] means = runKmeans(n, data, true);

        List<List<double[]>> partitions = new ArrayList<>();
        List<Map<Path, List<double[]>>> partyLabels = new ArrayList<>();
        for (int i = 0; i < ks; i++) {
            partitions.add(new ArrayList<>());
            if (labels != null) {
                partyLabels.add(new HashMap<>());
            }
        }
        int coefficientIndex = 0;
        for (List<IndexedCoefficient> shape : normalized) {
            double[] vector = extractVector(shape, indexes);
            int dex = nearestCluster(vector, means, n);
            partitions.get(dex).add(vector);
            if (labels != null) {
                // Group by the parent directory of the shape's source file.
                partyLabels.get(dex).computeIfAbsent(
                        labels.get(coefficientIndex).getParent(), a -> new ArrayList<>()).add(vector);
            }
            coefficientIndex++;
        }

        Graph graph = new Graph();
        int count = 0;
        double xmin = Double.MAX_VALUE;
        double ymin = Double.MAX_VALUE;
        double xmax = -xmin;
        double ymax = -ymin;
        for (List<double[]> part : partitions) {
            if (part.size() == 0) {
                count++;
                continue;
            }
            // Only the first two selected dimensions are plotted.
            double[] x = new double[part.size()];
            double[] y = new double[part.size()];
            for (int o = 0; o < part.size(); o++) {
                x[o] = part.get(o)[0];
                y[o] = part.get(o)[1];
            }
            DataSet set = graph.addData(x, y);
            double[] xrange = getAxisExtremes(x);
            double[] yrange = getAxisExtremes(y);
            xmin = xmin < xrange[0] ? xmin : xrange[0];
            xmax = xmax > xrange[1] ? xmax : xrange[1];
            ymin = ymin < yrange[0] ? ymin : yrange[0];
            ymax = ymax > yrange[1] ? ymax : yrange[1];
            set.setLine(null);
            set.setPoints(GraphPoints.filledCircles());
            Color c = set.COLOR;
            // NOTE(review): the original literal was octal 050 (decimal 40);
            // if a decimal alpha of 50 was intended, change this to 50.
            c = new Color(c.getRed(), c.getGreen(), c.getBlue(), 40);
            set.setColor(c);
            set.setLabel(String.format("k: %d", count));
            count++;
        }
        graph.setXRange(xmin, xmax);
        graph.setYRange(ymin, ymax);

        if (highlights != null) {
            Color[] colors = {Color.BLUE, Color.RED, Color.DARK_GRAY};
            GraphPoints[] points = {
                    GraphPoints.hollowSquares(), GraphPoints.hollowCircles(), GraphPoints.hollowTriangles()
            };
            int counter = 0;
            for (Highlight high : highlights) {
                List<double[]> values = new ArrayList<>();
                for (Path p : high.conditions) {
                    for (Map<Path, List<double[]>> map : partyLabels) {
                        values.addAll(map.getOrDefault(p, Collections.emptyList()));
                    }
                }
                double[] x = new double[values.size()];
                double[] y = new double[values.size()];
                for (int i = 0; i < x.length; i++) {
                    x[i] = values.get(i)[0];
                    y[i] = values.get(i)[1];
                }
                DataSet set = graph.addData(x, y);
                set.setLine(null);
                set.setPoints(points[counter % points.length]);
                set.setColor(colors[(counter / colors.length) % colors.length]);
                set.setPointWeight(2.0);
                set.setPointSize(8.0);
                counter++;
                set.setLabel(high.label);
            }
        }

        StringBuilder builds = new StringBuilder("Separated on indexes");
        for (int index : indexes) {
            builds.append(String.format(" %d", index));
        }
        graph.setTitle(builds.toString());
        graph.show(false);

        if (labels != null) {
            showLabels(indexes, partyLabels, means);
        }
        if (eigenVectors != null) {
            writeMeanShapes(indexes, means);
        }
        return calculateVariation(means, partitions, n);
    }

    /**
     * Ratio of mean within-cluster variance to total per-dimension variance.
     * Lower values indicate tighter clusters relative to the overall spread.
     */
    private double calculateVariation(double[] means, List<List<double[]>> partitions, int n) {
        double[] single = new double[n];
        double[] singleSqd = new double[n];
        double counter = 0;
        double massSigma = 0;
        double sigma = 0;
        for (int i = 0; i < ks; i++) {
            List<double[]> vectors = partitions.get(i);
            for (double[] vector : vectors) {
                for (int j = 0; j < n; j++) {
                    double delta = vector[j] - means[i * n + j];
                    sigma += delta * delta;
                    single[j] += vector[j];
                    singleSqd[j] += vector[j] * vector[j];
                }
                counter++;
            }
        }
        sigma = sigma / counter;
        for (int i = 0; i < n; i++) {
            double xbar = single[i] / counter;
            massSigma += singleSqd[i] / counter - xbar * xbar;
        }
        return sigma / massSigma;
    }

    /**
     * Creates a mean shape image per cluster from the clustered coefficients
     * and the loaded eigenvectors, written as "km_<indexes><k>sum.png".
     *
     * @param indexes coefficient indexes the clustering was run on.
     * @param means   flat ks*n cluster means from the same run.
     */
    void writeMeanShapes(int[] indexes, double[] means) {
        int space = eigenVectors.get(0).length;
        // assumes eigenvectors describe a square image — TODO confirm
        int width = (int) Math.sqrt(space);
        List<List<List<IndexedCoefficient>>> partitionedShapes = new ArrayList<>();
        int n = indexes.length;
        for (int i = 0; i < ks; i++) {
            partitionedShapes.add(new ArrayList<>());
        }
        // Assign each ORIGINAL shape to the cluster of its NORMALIZED vector.
        for (int shapeIndex = 0; shapeIndex < normalized.size(); shapeIndex++) {
            double[] vector = extractVector(normalized.get(shapeIndex), indexes);
            partitionedShapes.get(nearestCluster(vector, means, n)).add(original.get(shapeIndex));
        }
        for (int j = 0; j < ks; j++) {
            StringBuilder name = new StringBuilder("km");
            name.append("_");
            for (int dex : indexes) {
                name.append(dex);
                name.append("-");
            }
            name.append(j);
            name.append("sum.png");

            double[] output = new double[space];
            double[] a = new double[eigenVectors.size()];
            List<List<IndexedCoefficient>> partition = partitionedShapes.get(j);
            // Average the first eigenVectors.size() coefficients of each shape.
            // assumes each shape holds coefficients in index order with at
            // least eigenVectors.size() entries — TODO confirm against loader
            for (List<IndexedCoefficient> shape : partition) {
                for (int i = 0; i < a.length; i++) {
                    a[i] += shape.get(i).getCoefficient();
                }
            }
            int count = partition.size();
            for (int i = 0; i < a.length; i++) {
                a[i] = a[i] / count;
            }
            // Reconstruct the mean shape as a weighted sum of eigenvectors.
            for (int i = 0; i < a.length; i++) {
                double[] ev = eigenVectors.get(i);
                double ai = a[i];
                for (int k = 0; k < space; k++) {
                    output[k] += ev[k] * ai;
                }
            }
            BufferedImage img = TwoDHeatMap.createMap(width, width, 10, 10, output);
            try {
                ImageIO.write(img, "PNG", new File(name.toString()));
            } catch (IOException e) {
                // Best-effort image dump; a failed write should not abort the run.
                e.printStackTrace();
            }
        }
    }

    /**
     * Partition by means.
     *
     * NOTE(review): unimplemented stub kept for interface compatibility;
     * always returns null.
     */
    public List<List<List<IndexedCoefficient>>> partitionByMeans(int[] indexes, double[] means,
                                                                 List<List<IndexedCoefficient>> coefficients) {
        return null;
    }

    /**
     * Generates ks random initial means, each dimension drawn uniformly from
     * the [min, max] range observed in the data for that dimension.
     *
     * @param n      dimensionality of each vector.
     * @param values flat array of vectors (length is a multiple of n).
     * @return flat ks*n array of initial means.
     */
    double[] getInitialMean(int n, double[] values) {
        double[] mins = new double[n];
        double[] maxs = new double[n];
        for (int i = 0; i < n; i++) {
            mins[i] = Double.MAX_VALUE;
            maxs[i] = -Double.MAX_VALUE;
        }
        int vectors = values.length / n;
        for (int i = 0; i < vectors; i++) {
            for (int j = 0; j < n; j++) {
                double v = values[i * n + j];
                mins[j] = v < mins[j] ? v : mins[j];
                maxs[j] = v > maxs[j] ? v : maxs[j];
            }
        }
        double[] delta = new double[n];
        for (int i = 0; i < n; i++) {
            delta[i] = maxs[i] - mins[i];
        }
        double[] initial = new double[ks * n];
        Random rand = new Random();
        for (int k = 0; k < ks; k++) {
            for (int i = 0; i < n; i++) {
                initial[k * n + i] = mins[i] + rand.nextDouble() * delta[i];
            }
        }
        return initial;
    }

    /**
     * One k-means update step: assigns every vector in 'data' to its nearest
     * old mean, then returns the centroid of each cluster. A cluster that
     * receives no vectors keeps a zero mean (original behavior).
     */
    double[] getMeans(int n, double[] oldmeans, double[] data) {
        double[] updated = new double[ks * n];
        double[] counts = new double[ks];
        double[] vector = new double[n];
        int vectors = data.length / n;
        for (int i = 0; i < vectors; i++) {
            for (int k = 0; k < n; k++) {
                vector[k] = data[n * i + k];
            }
            int dex = nearestCluster(vector, oldmeans, n);
            for (int j = 0; j < n; j++) {
                updated[dex * n + j] += vector[j];
            }
            counts[dex]++;
        }
        for (int i = 0; i < ks; i++) {
            double c = counts[i];
            if (c > 0) {
                for (int j = 0; j < n; j++) {
                    updated[i * n + j] /= c;
                }
            }
        }
        return updated;
    }

    /**
     * A named group of source directories to overlay on the scatter plot.
     * Parsed from one tab-separated line: label, then one path per column.
     */
    static class Highlight {
        String label;
        List<Path> conditions;

        public Highlight(String all) {
            String[] tokens = all.split(Pattern.quote("\t"));
            label = tokens[0];
            conditions = new ArrayList<>();
            for (int i = 1; i < tokens.length; i++) {
                conditions.add(Paths.get(tokens[i]));
            }
        }
    }

    /**
     * Parses one Highlight per input line.
     */
    public void setHighlights(List<String> highlights) {
        System.out.println("working");
        this.highlights = highlights.stream().map(Highlight::new).collect(Collectors.toList());
    }

    /**
     * Shows a window listing, per cluster, how many shapes came from each
     * label directory, and writes the same table to "nd_-<indexes>.txt".
     *
     * @throws IOException if writing the table file fails.
     */
    public void showLabels(int[] indexes, List<Map<Path, List<double[]>>> labelParty, double[] means)
            throws IOException {
        String title = String.format("Indexes: %s", Arrays.toString(indexes));
        JFrame frame = new JFrame(title);
        // Local, single-threaded accumulation: StringBuilder over StringBuffer.
        StringBuilder buffer = new StringBuilder();
        int max = 0;
        int n = indexes.length;
        // Header: one line per cluster with its mean vector.
        for (int i = 0; i < ks; i++) {
            buffer.append("#k" + i);
            for (int j = 0; j < n; j++) {
                buffer.append(String.format("\t%f", means[i * n + j]));
            }
            buffer.append("\n");
        }
        List<List<Map.Entry<Path, List<double[]>>>> party = new ArrayList<>();
        for (Map<Path, List<double[]>> labels : labelParty) {
            ArrayList<Map.Entry<Path, List<double[]>>> entries = new ArrayList<>();
            for (Map.Entry<Path, List<double[]>> entry : labels.entrySet()) {
                entries.add(entry);
            }
            party.add(entries);
            int s = labels.size();
            max = s > max ? s : max;
        }
        // Body: one column per cluster, "count:path" cells.
        for (int i = 0; i < max; i++) {
            for (List<Map.Entry<Path, List<double[]>>> labels : party) {
                if (i < labels.size()) {
                    Map.Entry<Path, List<double[]>> entry = labels.get(i);
                    buffer.append(entry.getValue().size() + ":" + entry.getKey());
                }
                buffer.append("\t");
            }
            buffer.append("\n");
        }
        JTextArea area = new JTextArea(buffer.toString());
        frame.add(new JScrollPane(area));
        frame.setSize(new Dimension(800, 800));
        frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
        frame.setVisible(true);

        List<String> wrap = new ArrayList<>();
        wrap.add(buffer.toString());
        StringBuilder builds = new StringBuilder("nd_");
        for (int i : indexes) {
            builds.append(String.format("-%d", i));
        }
        builds.append(".txt");
        Files.write(Paths.get(builds.toString()), wrap, StandardCharsets.UTF_8,
                StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE);
    }

    /**
     * Loads eigenvectors, one tab-separated vector per line.
     */
    public void setEigens(List<String> eigens) {
        eigenVectors = new ArrayList<double[]>();
        for (String line : eigens) {
            double[] values = Arrays.stream(line.split(Pattern.quote("\t")))
                    .mapToDouble(Double::parseDouble).toArray();
            eigenVectors.add(values);
        }
    }

    /**
     * Axis range for plotting: fixed [-3, 3] when 'unscaled' is set, otherwise
     * mean +/- two standard deviations clamped to the observed min/max.
     */
    static public double[] getAxisExtremes(double[] data) {
        if (unscaled) return new double[]{-3, 3};
        double sum = 0;
        double sum2 = 0;
        double min = Double.MAX_VALUE;
        double max = -min;
        for (double d : data) {
            sum += d;
            sum2 += d * d;
            min = d < min ? d : min;
            max = d > max ? d : max;
        }
        double center = sum / data.length;
        double stdev = Math.sqrt(sum2 / data.length - center * center);
        double top = center + 2 * stdev;
        top = top > max ? max : top;
        double bottom = center - 2 * stdev;
        bottom = bottom < min ? min : bottom;
        return new double[]{bottom, top};
    }

    /**
     * Sets per-shape source paths, parallel to the input shape list.
     */
    public void setLabels(List<String> labels) {
        this.labels = labels.stream().map(Paths::get).collect(Collectors.toList());
    }

    public static void main(String[] args) throws IOException {
        //testVersion();
        loadDataAndRun(args);
    }

    /**
     * Loads coefficients (args[0]) and optionally labels (args[1]),
     * eigenvectors (args[2]) and highlights (args[3]), then plots
     * variance-vs-k curves for a few hard-coded index pairs.
     */
    public static void loadDataAndRun(String[] args) throws IOException {
        List<List<IndexedCoefficient>> coefficients = IndexedCoefficient.readCoefficients(Paths.get(args[0]));
        CoefficientKmeansND kmeans = new CoefficientKmeansND();
        // setInput normalizes internally; the original also called
        // normalizeCoefficients here and discarded the result (dead code).
        kmeans.setInput(coefficients);
        if (args.length >= 2) {
            List<String> labels = Files.readAllLines(Paths.get(args[1]));
            kmeans.setLabels(labels);
        }
        if (args.length >= 3) {
            List<String> eigens = Files.readAllLines(Paths.get(args[2]));
            kmeans.setEigens(eigens);
        }
        if (args.length >= 4) {
            List<String> highlights = Files.readAllLines(Paths.get(args[3]));
            kmeans.setHighlights(highlights);
        }
        int ks = 6;
        Graph variancePlot = new Graph();
        int[][] indexGroups = new int[][]{
                {1023, 1020},
                {1022, 1021},
                {1019, 1018}
        };
        int vectors = indexGroups.length;
        for (int i = 0; i < vectors; i++) {
            int[] indexes = indexGroups[i];
            double[] x = new double[ks];
            double[] y = new double[ks];
            for (int j = 0; j < ks; j++) {
                kmeans.ks = 2 + j;
                //241 & 240 ntc separates.
                double s = kmeans.plot(indexes);
                x[j] = kmeans.ks;
                y[j] = s;
            }
            DataSet set = variancePlot.addData(x, y);
            set.setLabel(Arrays.toString(indexes));
        }
        variancePlot.show(true, "variance");
    }

    /**
     * Self-test on three synthetic Gaussian clusters; plots the variance
     * ratio for k = 1..5.
     */
    public static void testVersion() {
        double[] v1 = {5 * Math.sqrt(2) / 2, 5 * Math.sqrt(2) / 2, 0};
        double[] v2 = {-3 * Math.sqrt(2) / 2, 3 * Math.sqrt(2) / 2, 0};
        double[] v3 = {1, 0, 3};
        List<List<IndexedCoefficient>> data = new ArrayList<>(300);
        List<String> labels = new ArrayList<>(300);
        Random ng = new Random();
        double noise = 1.0;
        for (int i = 0; i < 100; i++) {
            List<IndexedCoefficient> shape = new ArrayList<>(2);
            for (int j = 0; j < 3; j++) {
                IndexedCoefficient ic = new IndexedCoefficient(j, v1[j] + ng.nextGaussian() * noise);
                shape.add(ic);
            }
            labels.add("one/d.txt");
            data.add(shape);
        }
        for (int i = 0; i < 100; i++) {
            List<IndexedCoefficient> shape = new ArrayList<>(2);
            for (int j = 0; j < 3; j++) {
                IndexedCoefficient ic = new IndexedCoefficient(j, v2[j] + ng.nextGaussian() * noise);
                shape.add(ic);
            }
            labels.add("two/d.txt");
            data.add(shape);
        }
        for (int i = 0; i < 100; i++) {
            List<IndexedCoefficient> shape = new ArrayList<>(2);
            for (int j = 0; j < 3; j++) {
                IndexedCoefficient ic = new IndexedCoefficient(j, v3[j] + ng.nextGaussian() * noise);
                shape.add(ic);
            }
            labels.add("three/d.txt");
            data.add(shape);
        }
        CoefficientKmeansND kmeans = new CoefficientKmeansND();
        kmeans.setInput(data);
        kmeans.setLabels(labels);
        double[] x = new double[5];
        double[] y = new double[5];
        try {
            for (int i = 1; i <= 5; i++) {
                kmeans.ks = i;
                double sigma = kmeans.plot(new int[]{0, 1, 2});
                x[i - 1] = i;
                y[i - 1] = sigma;
            }
            new Graph(x, y).show(true);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
/* * Copyright (c) 2005-2011, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.apimgt.api.model; import org.apache.commons.lang3.StringUtils; import org.json.simple.JSONObject; import org.wso2.carbon.apimgt.api.model.policy.Policy; import java.io.Serializable; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; /** * Provider's & system's view of API */ @SuppressWarnings("unused") public class API implements Serializable { private static final long serialVersionUID = 1L; private APIIdentifier id; // uuid of registry artifact // this id is provider's username independent private String uuid; private String description; private String url; private String sandboxUrl; private String wsdlUrl; private String wsdlArchivePath; private String wadlUrl; private String swaggerDefinition; private String graphQLSchema; private String type; private String context; private String contextTemplate; private String thumbnailUrl; private ResourceFile wsdlArchive; private Set<String> tags = new LinkedHashSet<String>(); private Set<Documentation> documents = new LinkedHashSet<Documentation>(); private String httpVerb; private Date lastUpdated; private Set<Tier> availableTiers = new LinkedHashSet<Tier>(); private Set<Policy> availableSubscriptionLevelPolicies = new LinkedHashSet<Policy>(); 
private String apiLevelPolicy; private AuthorizationPolicy authorizationPolicy; private Set<URITemplate> uriTemplates = new LinkedHashSet<URITemplate>(); //dirty pattern to identify which parts to be updated private boolean apiHeaderChanged; private boolean apiResourcePatternsChanged; private String status; private String technicalOwner; private String technicalOwnerEmail; private String businessOwner; private String businessOwnerEmail; // Used for keeping Production & Sandbox Throttling limits. private String productionMaxTps; private String sandboxMaxTps; private String visibility; private String visibleRoles; private String visibleTenants; private List<Label> gatewayLabels; private boolean endpointSecured = false; private boolean endpointAuthDigest = false; private String endpointUTUsername; private String endpointUTPassword; private String transports; private String inSequence; private String outSequence; private String faultSequence; private String oldInSequence; private String oldOutSequence; private String oldFaultSequence; private boolean advertiseOnly; private String apiOwner; private String redirectURL; private String subscriptionAvailability; private String subscriptionAvailableTenants; private CORSConfiguration corsConfiguration; private String endpointConfig; private String responseCache; private int cacheTimeout; private String implementation = "ENDPOINT"; private String monetizationCategory; //Custom authorization header specific to the API private String authorizationHeader; private Set<Scope> scopes; private boolean isDefaultVersion = false; private boolean isPublishedDefaultVersion = false; private Set<String> environments; private String createdTime; /** * Customized properties relevant to the particular API. */ private JSONObject additionalProperties; /** * Properties relevant to monetization of the particular API. 
*/ private JSONObject monetizationProperties = new JSONObject(); /** * Property to indicate the monetization status of the particular API. */ private boolean isMonetizationEnabled = false; // Used for endpoint environments configured with non empty URLs private Set<String> environmentList; // API security at the gateway level. private String apiSecurity = "oauth2"; private static final String NULL_VALUE = "NULL"; private List<APIEndpoint> endpoints = new ArrayList<APIEndpoint>(); /** * Property to hold the enable/disable status of the json schema validation. */ private boolean enableSchemaValidation = false; public void setEnvironmentList(Set<String> environmentList) { this.environmentList = environmentList; } public Set<String> getEnvironmentList() { return environmentList; } /** * To get the additional properties * * @return additional properties of the API */ public JSONObject getAdditionalProperties() { return additionalProperties; } /** * To assign a set of customized properties to the API. * * @param properties Properties that need to be assigned to. 
*/ public void setAdditionalProperties(JSONObject properties) { this.additionalProperties = properties; } /** * This method is used to get the properties related to monetization * * @return properties related to monetization */ public JSONObject getMonetizationProperties() { return monetizationProperties; } /** * This method is used to get the monetization status (true or false) * * @return flag to indicate the monetization status (true or false) */ public boolean getMonetizationStatus() { return isMonetizationEnabled; } /** * This method is used to set the monetization status (true or false) * * @param monetizationStatus flag to indicate the monetization status (true or false) */ public void setMonetizationStatus(boolean monetizationStatus) { this.isMonetizationEnabled = monetizationStatus; } /** * This method is used to set the monetization properties * * @param monetizationProperties properties related to monetization */ public void setMonetizationProperties(JSONObject monetizationProperties) { this.monetizationProperties = monetizationProperties; } /** * This method is used to add monetization property * * @param key key of the monetization property * @param value applicable value of the monetization property */ public void addMonetizationProperty(String key, String value) { monetizationProperties.put(key, value); } /** * To add a new property to additional properties list. * * @param key Name of the property. * @param value Value of the property. */ public void addProperty(String key, String value) { additionalProperties.put(key, value); } /** * To get the value of the property. * * @param key Name of the property * @return value of the property. */ public String getProperty(String key) { return additionalProperties.get(key).toString(); } /** * Publisher access control related parameters. * AccessControl -> Specifies whether that particular API is restricted to certain set of publishers and creators. 
* AccessControlRoles -> Specifies the roles that the particular API is visible to. */ private String accessControl; private String accessControlRoles; public String getSwaggerDefinition() {return swaggerDefinition; } public void setSwaggerDefinition(String swaggerDefinition) { this.swaggerDefinition = swaggerDefinition; } public void setGraphQLSchema(String graphQLSchema) { this.graphQLSchema = graphQLSchema; } public String getGraphQLSchema() {return graphQLSchema; } public Set<String> getEnvironments() { return environments; } public void setEnvironments(Set<String> environments) { this.environments = environments; } /** * Contains flag indicating whether dummy backend or not * * @return */ public String getImplementation() { return implementation; } /** * Returns flag indicating whether dummy backend or not * * @param implementation */ public void setImplementation(String implementation) { this.implementation = implementation; } /** * The average rating provided by the API subscribers */ private float rating; private boolean isLatest; //TODO: missing - total user count, up time statistics,tier public String getUUID() { return uuid; } public void setUUID(String uuid) { this.uuid = uuid; } public String getProductionMaxTps() { return productionMaxTps; } public void setProductionMaxTps(String productionMaxTps) { this.productionMaxTps = productionMaxTps; } public String getSandboxMaxTps() { return sandboxMaxTps; } public void setSandboxMaxTps(String sandboxMaxTps) { this.sandboxMaxTps = sandboxMaxTps; } public boolean isAdvertiseOnly() { return advertiseOnly; } public void setAdvertiseOnly(boolean advertiseOnly) { this.advertiseOnly = advertiseOnly; } public String getApiOwner() { return apiOwner; } public void setApiOwner(String apiOwner) { this.apiOwner = apiOwner; } public String getRedirectURL() { return redirectURL; } public void setRedirectURL(String redirectURL) { this.redirectURL = redirectURL; } public API(APIIdentifier id) { this.id = id; 
additionalProperties = new JSONObject(); } public APIIdentifier getId() { return id; } public String getTransports() { return transports; } public void setTransports(String transports) { this.transports = transports; } public String getTechnicalOwner() { return technicalOwner; } public void setTechnicalOwner(String technicalOwner) { this.technicalOwner = technicalOwner; } public String getTechnicalOwnerEmail() { return technicalOwnerEmail; } public void setTechnicalOwnerEmail(String technicalOwnerEmail) { this.technicalOwnerEmail = technicalOwnerEmail; } public String getBusinessOwner() { return businessOwner; } public void setBusinessOwner(String businessOwner) { this.businessOwner = businessOwner; } public String getBusinessOwnerEmail() { return businessOwnerEmail; } public void setBusinessOwnerEmail(String businessOwnerEmail) { this.businessOwnerEmail = businessOwnerEmail; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public String getUrl() { return url; } public void setUrl(String url) { this.url = url; } public String getSandboxUrl() { return sandboxUrl; } public void setSandboxUrl(String sandboxUrl) { this.sandboxUrl = sandboxUrl; } public String getWsdlUrl() { return wsdlUrl; } public void setContext(String context) { this.context = context; } public String getContext() { return context; } public void setContextTemplate(String contextTemplate) { this.contextTemplate = contextTemplate; } public String getContextTemplate() { return contextTemplate; } public void setWsdlUrl(String wsdlUrl) { this.wsdlUrl = wsdlUrl; } public String getThumbnailUrl() { return thumbnailUrl; } public void setThumbnailUrl(String thumbnailUrl) { this.thumbnailUrl = thumbnailUrl; } public Set<String> getTags() { return Collections.unmodifiableSet(tags); } public void addTags(Set<String> tags) { this.tags.addAll(tags); } public void removeTags(Set<String> tags) { this.tags.removeAll(tags); } 
// Documents attached to this API; exposed as a read-only view, mutate through
// addDocuments/removeDocuments.
public Set<Documentation> getDocuments() { return Collections.unmodifiableSet(documents); }

public void addDocuments(Set<Documentation> documents) { this.documents.addAll(documents); }

public void removeDocuments(Set<Documentation> documents) { this.documents.removeAll(documents); }

public String getHttpVerb() { return httpVerb; }

public void setHttpVerb(String httpVerb) { this.httpVerb = httpVerb; }

// Defensive copies in both directions guard the internal timestamp against
// caller mutation (java.util.Date is mutable).
public Date getLastUpdated() { return new Date(lastUpdated.getTime()); }

public void setLastUpdated(Date lastUpdated) { this.lastUpdated = new Date(lastUpdated.getTime()); }

// Subscription tiers available for this API; read-only view, mutate through
// addAvailableTiers/removeAvailableTiers/removeAllTiers.
public Set<Tier> getAvailableTiers() { return Collections.unmodifiableSet(availableTiers); }

public void addAvailableTiers(Set<Tier> availableTiers) { this.availableTiers.addAll(availableTiers); }

/**
 * Removes all Tiers from the API object.
 */
public void removeAllTiers() { availableTiers.clear(); }

/**
 * Removes all Policies from the API object.
 */
public void removeAllPolicies() { availableSubscriptionLevelPolicies.clear(); }

public void removeAvailableTiers(Set<Tier> availableTiers) { this.availableTiers.removeAll(availableTiers); }

// NOTE(review): unlike tags/documents/tiers above, this returns the internal
// mutable set directly — callers can modify it; confirm this is intentional.
public Set<URITemplate> getUriTemplates() { return uriTemplates; }

public void setUriTemplates(Set<URITemplate> uriTemplates) { this.uriTemplates = uriTemplates; }

// Lifecycle status, stored as a plain string; the APIStatus overload below
// stores the enum's string form.
public String getStatus() { return status; }

public void setStatus(String status) { this.status = status; }

public void setStatus(APIStatus status) { this.status = status.getStatus(); }

public float getRating() { return rating; }

public void setRating(float rating) { this.rating = rating; }

public void setLatest(boolean latest) { isLatest = latest; }

/**
 * @return true if the current version of the API is the latest
 */
public boolean isLatest() { return isLatest; }

public AuthorizationPolicy getAuthorizationPolicy() { return authorizationPolicy; }

public void setAuthorizationPolicy(AuthorizationPolicy authorizationPolicy) { this.authorizationPolicy = authorizationPolicy; }

public String getWadlUrl() { return wadlUrl; }

public void setWadlUrl(String wadlUrl) { this.wadlUrl = wadlUrl; }

public String getVisibility() { return visibility; }

public void setVisibility(String visibility) { this.visibility = visibility; }

public String getVisibleRoles() { return visibleRoles; }

public void setVisibleRoles(String visibleRoles) { this.visibleRoles = visibleRoles; }

public String getVisibleTenants() { return visibleTenants; }

public void setVisibleTenants(String visibleTenants) { this.visibleTenants = visibleTenants; }

public List<Label> getGatewayLabels() { return gatewayLabels; }

public void setGatewayLabels(List<Label> gatewayLabels) { this.gatewayLabels = gatewayLabels; }

public boolean isApiHeaderChanged() { return apiHeaderChanged; }

public void setApiHeaderChanged(boolean apiHeaderChanged) { this.apiHeaderChanged = apiHeaderChanged; }

public boolean isApiResourcePatternsChanged() { return apiResourcePatternsChanged; }

public void setApiResourcePatternsChanged(boolean apiResourcePatternsChanged) { this.apiResourcePatternsChanged = apiResourcePatternsChanged; }

/**
 * @return the endpointUTUsername
 */
public String getEndpointUTUsername() { return endpointUTUsername; }

/**
 * @param endpointUTUsername the endpointUTUsername to set
 */
public void setEndpointUTUsername(String endpointUTUsername) { this.endpointUTUsername = endpointUTUsername; }

/**
 * @return the endpointUTPassword
 */
public String getEndpointUTPassword() { return endpointUTPassword; }

/**
 * @param endpointUTPassword the endpointUTPassword to set
 */
public void setEndpointUTPassword(String endpointUTPassword) { this.endpointUTPassword = endpointUTPassword; }

/**
 * @return the endpointSecured
 */
public boolean isEndpointSecured() { return endpointSecured; }

/**
 * @param endpointSecured the endpointSecured to set
 */
public void setEndpointSecured(boolean endpointSecured) { this.endpointSecured = endpointSecured; }

/**
 * @return the endpointAuthDigest
 */
public boolean isEndpointAuthDigest() { return
endpointAuthDigest; }

/**
 * @param endpointAuthDigest the endpointAuthDigest to set
 */
public void setEndpointAuthDigest(boolean endpointAuthDigest) { this.endpointAuthDigest = endpointAuthDigest; }

public String getInSequence() { return inSequence; }

/**
 * @param inSeq insequence for the API
 */
public void setInSequence(String inSeq) { this.inSequence = inSeq; }

public String getOutSequence() { return outSequence; }

/**
 * @param outSeq outSequence for the API
 */
public void setOutSequence(String outSeq) { this.outSequence = outSeq; }

/**
 * Remove custom sequences (in/out/fault) from the api object.
 */
public void removeCustomSequences() {
    this.inSequence = null;
    this.outSequence = null;
    this.faultSequence = null;
}

public String getOldInSequence() { return oldInSequence; }

public void setOldInSequence(String oldInSequence) { this.oldInSequence = oldInSequence; }

public String getOldOutSequence() { return oldOutSequence; }

public void setOldOutSequence(String oldOutSequence) { this.oldOutSequence = oldOutSequence; }

public String getSubscriptionAvailability() { return subscriptionAvailability; }

public void setSubscriptionAvailability(String subscriptionAvailability) { this.subscriptionAvailability = subscriptionAvailability; }

public String getSubscriptionAvailableTenants() { return subscriptionAvailableTenants; }

public void setSubscriptionAvailableTenants(String subscriptionAvailableTenants) { this.subscriptionAvailableTenants = subscriptionAvailableTenants; }

/**
 * Returns the endpoint configuration JSON for this API. When no legacy
 * endpointConfig string is set, the configuration is derived from the new
 * APIEndpoint list instead.
 *
 * FIX(review): the original condition read
 *   endpointConfig == null || StringUtils.isAllEmpty(endpointConfig) &amp;&amp; endpoints.size() > 0
 * Because &amp;&amp; binds tighter than ||, a null endpointConfig short-circuited the
 * whole check and built a config string even when the endpoints list was empty,
 * returning "" where callers previously saw null. The doubled outer parentheses
 * in the original indicate the grouping below was intended. A null guard on
 * endpoints is added because the field is now dereferenced on every null/empty
 * endpointConfig.
 *
 * @return the endpoint config string, or null when neither source is set
 */
public String getEndpointConfig() {
    // This is to support the new Endpoint object
    if ((endpointConfig == null || StringUtils.isAllEmpty(endpointConfig))
            && endpoints != null && !endpoints.isEmpty()) {
        return getEndpointConfigString(endpoints);
    }
    return endpointConfig;
}

public void setEndpointConfig(String endpointConfig) { this.endpointConfig = endpointConfig; }

public String getResponseCache() { return responseCache; }

public void setResponseCache(String responseCache) { this.responseCache = responseCache; }

public int getCacheTimeout() { return
cacheTimeout; }

public void setCacheTimeout(int cacheTimeout) { this.cacheTimeout = cacheTimeout; }

public String getFaultSequence() { return faultSequence; }

public void setFaultSequence(String faultSequence) { this.faultSequence = faultSequence; }

public String getOldFaultSequence() { return oldFaultSequence; }

public void setOldFaultSequence(String oldFaultSequence) { this.oldFaultSequence = oldFaultSequence; }

// OAuth scopes attached to this API.
public Set<Scope> getScopes() { return scopes; }

public void setScopes(Set<Scope> scopes) { this.scopes = scopes; }

// Flags for the default-version mechanism; presumably marks whether this API
// version is served when no explicit version is requested — confirm with callers.
public void setAsDefaultVersion(boolean value) { isDefaultVersion = value; }

public void setAsPublishedDefaultVersion(boolean value) { isPublishedDefaultVersion = value; }

public boolean isDefaultVersion() { return isDefaultVersion; }

public boolean isPublishedDefaultVersion() { return isPublishedDefaultVersion; }

public CORSConfiguration getCorsConfiguration() { return corsConfiguration; }

public void setCorsConfiguration(CORSConfiguration corsConfiguration) { this.corsConfiguration = corsConfiguration; }

public String getMonetizationCategory() { return this.monetizationCategory; }

public void setMonetizationCategory(String monetizationCategory) { this.monetizationCategory = monetizationCategory; }

public String getApiLevelPolicy() { return apiLevelPolicy; }

public void setApiLevelPolicy(String apiLevelPolicy) { this.apiLevelPolicy = apiLevelPolicy; }

public String getType() { return type; }

// Normalizes the API type: a blank value or the literal NULL_VALUE string maps
// to "HTTP"; anything else is trimmed and upper-cased.
// NOTE(review): toUpperCase() uses the default locale; Locale.ROOT would be
// safer for locale-sensitive casing, but changing it is out of scope here.
public void setType(String type) {
    if (StringUtils.isEmpty(type) || NULL_VALUE.equalsIgnoreCase(StringUtils.trim(type))) {
        this.type = "HTTP";
    } else {
        this.type = StringUtils.trim(type).toUpperCase();
    }
}

public String getCreatedTime() { return createdTime; }

public void setCreatedTime(String createdTime) { this.createdTime = createdTime; }

public String getAccessControlRoles() { return accessControlRoles; }

public void setAccessControlRoles(String accessControlRoles) { this.accessControlRoles = accessControlRoles; }

public String getAccessControl() { return accessControl; }

public void setAccessControl(String accessControl) { this.accessControl = accessControl; }

public String getAuthorizationHeader() { return authorizationHeader; }

public void setAuthorizationHeader(String authorizationHeader) { this.authorizationHeader = authorizationHeader; }

/**
 * Check the status of the Json schema validation property.
 *
 * @return Status of the validator property.
 */
public boolean isEnabledSchemaValidation() { return enableSchemaValidation; }

/**
 * To set the JSON schema validation enable/disable.
 *
 * @param enableSchemaValidation Given Status.
 */
public void setEnableSchemaValidation(boolean enableSchemaValidation) { this.enableSchemaValidation = enableSchemaValidation; }

/**
 * To set the gateway security for the relevant API.
 * A null argument is ignored, so an existing value is never cleared here.
 *
 * @param apiSecurity Relevant type of gateway security for the API.
 */
public void setApiSecurity(String apiSecurity) {
    if (apiSecurity != null) {
        this.apiSecurity = apiSecurity;
    }
}

/**
 * To get the gateway level security specific to the relevant API.
 *
 * @return Relevant type of gateway security.
 */
public String getApiSecurity() { return apiSecurity; }

public String getWsdlArchivePath() { return wsdlArchivePath; }

public void setWsdlArchivePath(String wsdlArchivePath) { this.wsdlArchivePath = wsdlArchivePath; }

public ResourceFile getWsdlArchive() { return wsdlArchive; }

public void setWsdlArchive(ResourceFile wsdlArchive) { this.wsdlArchive = wsdlArchive; }

public List<APIEndpoint> getEndpoint() { return endpoints; }

public void setEndpoint(List<APIEndpoint> endpoint) { this.endpoints = endpoint; }

/**
 * This method returns endpoints according to the given endpoint config
 *
 * @param endpoints list of endpoints given
 * @return String endpoint config
 */
public static String getEndpointConfigString(List<APIEndpoint> endpoints) {
    //todo improve this logic to support multiple endpoints such as failover and load balance
    // NOTE(review): endpoint.getInline() is dereferenced without a null check —
    // this assumes every endpoint carries an inline config; confirm with callers.
    StringBuilder sb = new StringBuilder();
    if (endpoints != null && endpoints.size() > 0) {
        sb.append("{");
        for (APIEndpoint endpoint : endpoints) {
            sb.append("\"")
                    .append(endpoint.getType())
                    .append("\": {\"url\":\"")
                    .append(endpoint.getInline().getEndpointConfig().getList().get(0).getUrl())
                    .append("\",\"timeout\":\"")
                    .append(endpoint.getInline().getEndpointConfig().getList().get(0).getTimeout())
                    .append("\",\"key\":\"")
                    .append(endpoint.getKey())
                    .append("\"},");
        }
        sb.append("\"endpoint_type\" : \"")
                .append(endpoints.get(0).getInline().getType())//assuming all the endpoints are same type
                .append("\"}\n");
    }
    return sb.toString();
}
}
// NOTE(review): the physical line structure of this test file was collapsed in
// transit (many statements per line). The code is kept byte-identical below;
// only these review comments are added. The class hand-writes the output the
// immutables annotation processor would generate (delegating @JsonCreator +
// package-private Json delegate + Builder) for a plain, a generic, and a
// two-type-parameter interface, then round-trips each through Jackson.
package com.fasterxml.jackson.databind.interop; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Objects; import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize; import org.junit.Test; import static org.junit.Assert.assertEquals; /** * Tests for serialization and deserialization of objects based on * <a href="https://immutables.github.io/">immutables</a>. *<p> * Originally to verify fix for * <a href="https://github.com/FasterXML/jackson-databind/pull/2894">databind#2894</a> * to guard against regression. */ public class ImmutablesTypeSerializationTest { /* * Interface Definitions based on the immutables annotation processor: https://immutables.github.io/ */ @JsonDeserialize(as = ImmutableAccount.class) @JsonSerialize(as = ImmutableAccount.class) interface Account { Long getId(); String getName(); } @JsonDeserialize(as = ImmutableKey.class) @JsonSerialize(as = ImmutableKey.class) interface Key<T> { T getId(); } @JsonDeserialize(as = ImmutableEntry.class) @JsonSerialize(as = ImmutableEntry.class) interface Entry<K, V> { K getKey(); V getValue(); } /* * Implementations based on the output of the immutables annotation processor version 2.8.8.
* See https://immutables.github.io/ */ static final class ImmutableAccount implements ImmutablesTypeSerializationTest.Account { private final Long id; private final String name; ImmutableAccount(Long id, String name) { this.id = id; this.name = name; } @JsonProperty("id") @Override public Long getId() { return id; } @JsonProperty("name") @Override public String getName() { return name; } @Override public boolean equals(Object another) { if (this == another) return true; return another instanceof ImmutableAccount && equalTo((ImmutableAccount) another); } private boolean equalTo(ImmutableAccount another) { return id.equals(another.id) && name.equals(another.name); } @Override public int hashCode() { int h = 5381; h += (h << 5) + id.hashCode(); h += (h << 5) + name.hashCode(); return h; } @Override public String toString() { return "Account{id=" + id + ", name=" + name + "}"; } @JsonDeserialize @JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE) static final class Json implements ImmutablesTypeSerializationTest.Account { Long id; String name; @JsonProperty("id") public void setId(Long id) { this.id = id; } @JsonProperty("name") public void setName(String name) { this.name = name; } @Override public Long getId() { throw new UnsupportedOperationException(); } @Override public String getName() { throw new UnsupportedOperationException(); } } @JsonCreator(mode = JsonCreator.Mode.DELEGATING) static ImmutableAccount fromJson(ImmutableAccount.Json json) { ImmutableAccount.Builder builder = ImmutableAccount.builder(); if (json.id != null) { builder.id(json.id); } if (json.name != null) { builder.name(json.name); } return builder.build(); } public static ImmutableAccount.Builder builder() { return new ImmutableAccount.Builder(); } public static final class Builder { private static final long INIT_BIT_ID = 0x1L; private static final long INIT_BIT_NAME = 0x2L; private long initBits = 0x3L; private Long id; private String name; Builder() { } public final
ImmutableAccount.Builder from(ImmutablesTypeSerializationTest.Account instance) { Objects.requireNonNull(instance, "instance"); id(instance.getId()); name(instance.getName()); return this; } @JsonProperty("id") public final ImmutableAccount.Builder id(Long id) { this.id = Objects.requireNonNull(id, "id"); initBits &= ~INIT_BIT_ID; return this; } @JsonProperty("name") public final ImmutableAccount.Builder name(String name) { this.name = Objects.requireNonNull(name, "name"); initBits &= ~INIT_BIT_NAME; return this; } public ImmutableAccount build() { if (initBits != 0) { throw new IllegalStateException(formatRequiredAttributesMessage()); } return new ImmutableAccount(id, name); } private String formatRequiredAttributesMessage() { List<String> attributes = new ArrayList<>(); if ((initBits & INIT_BIT_ID) != 0) attributes.add("id"); if ((initBits & INIT_BIT_NAME) != 0) attributes.add("name"); return "Cannot build Account, some of required attributes are not set " + attributes; } } } static final class ImmutableKey<T> implements ImmutablesTypeSerializationTest.Key<T> { private final T id; ImmutableKey(T id) { this.id = id; } @JsonProperty("id") @Override public T getId() { return id; } @Override public boolean equals(Object another) { if (this == another) return true; return another instanceof ImmutableKey<?> && equalTo((ImmutableKey<?>) another); } private boolean equalTo(ImmutableKey<?> another) { return id.equals(another.id); } @Override public int hashCode() { int h = 5381; h += (h << 5) + id.hashCode(); return h; } @Override public String toString() { return "Key{id=" + id + "}"; } @JsonDeserialize @JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE) static final class Json<T> implements ImmutablesTypeSerializationTest.Key<T> { T id; @JsonProperty("id") public void setId(T id) { this.id = id; } @Override public T getId() { throw new UnsupportedOperationException(); } } @JsonCreator(mode = JsonCreator.Mode.DELEGATING) static <T> ImmutableKey<T>
fromJson(ImmutableKey.Json<T> json) { ImmutableKey.Builder<T> builder = ImmutableKey.<T>builder(); if (json.id != null) { builder.id(json.id); } return builder.build(); } public static <T> ImmutableKey.Builder<T> builder() { return new ImmutableKey.Builder<>(); } public static final class Builder<T> { private static final long INIT_BIT_ID = 0x1L; private long initBits = 0x1L; private T id; Builder() { } public final ImmutableKey.Builder<T> from(ImmutablesTypeSerializationTest.Key<T> instance) { Objects.requireNonNull(instance, "instance"); id(instance.getId()); return this; } @JsonProperty("id") public final ImmutableKey.Builder<T> id(T id) { this.id = Objects.requireNonNull(id, "id"); initBits &= ~INIT_BIT_ID; return this; } public ImmutableKey<T> build() { if (initBits != 0) { throw new IllegalStateException(formatRequiredAttributesMessage()); } return new ImmutableKey<>(id); } private String formatRequiredAttributesMessage() { List<String> attributes = new ArrayList<>(); if ((initBits & INIT_BIT_ID) != 0) attributes.add("id"); return "Cannot build Key, some of required attributes are not set " + attributes; } } } static final class ImmutableEntry<K, V> implements ImmutablesTypeSerializationTest.Entry<K, V> { private final K key; private final V value; ImmutableEntry(K key, V value) { this.key = key; this.value = value; } @JsonProperty("key") @Override public K getKey() { return key; } @JsonProperty("value") @Override public V getValue() { return value; } @Override public boolean equals(Object another) { if (this == another) return true; return another instanceof ImmutableEntry<?, ?> && equalTo((ImmutableEntry<?, ?>) another); } private boolean equalTo(ImmutableEntry<?, ?> another) { return key.equals(another.key) && value.equals(another.value); } @Override public int hashCode() { int h = 5381; h += (h << 5) + key.hashCode(); h += (h << 5) + value.hashCode(); return h; } @Override public String toString() { return "Entry{key=" + key + ", value=" + value + "}"; }
// The Json delegate + Builder for ImmutableEntry mirror the Account/Key pattern above.
@JsonDeserialize @JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE) static final class Json<K, V> implements ImmutablesTypeSerializationTest.Entry<K, V> { K key; V value; @JsonProperty("key") public void setKey(K key) { this.key = key; } @JsonProperty("value") public void setValue(V value) { this.value = value; } @Override public K getKey() { throw new UnsupportedOperationException(); } @Override public V getValue() { throw new UnsupportedOperationException(); } } @JsonCreator(mode = JsonCreator.Mode.DELEGATING) static <K, V> ImmutableEntry<K, V> fromJson(ImmutableEntry.Json<K, V> json) { ImmutableEntry.Builder<K, V> builder = ImmutableEntry.<K, V>builder(); if (json.key != null) { builder.key(json.key); } if (json.value != null) { builder.value(json.value); } return builder.build(); } public static <K, V> ImmutableEntry.Builder<K, V> builder() { return new ImmutableEntry.Builder<>(); } public static final class Builder<K, V> { private static final long INIT_BIT_KEY = 0x1L; private static final long INIT_BIT_VALUE = 0x2L; private long initBits = 0x3L; private K key; private V value; Builder() { } public final ImmutableEntry.Builder<K, V> from(ImmutablesTypeSerializationTest.Entry<K, V> instance) { Objects.requireNonNull(instance, "instance"); key(instance.getKey()); value(instance.getValue()); return this; } @JsonProperty("key") public final ImmutableEntry.Builder<K, V> key(K key) { this.key = Objects.requireNonNull(key, "key"); initBits &= ~INIT_BIT_KEY; return this; } @JsonProperty("value") public final ImmutableEntry.Builder<K, V> value(V value) { this.value = Objects.requireNonNull(value, "value"); initBits &= ~INIT_BIT_VALUE; return this; } public ImmutableEntry<K, V> build() { if (initBits != 0) { throw new IllegalStateException(formatRequiredAttributesMessage()); } return new ImmutableEntry<>(key, value); } private String formatRequiredAttributesMessage() { List<String> attributes = new ArrayList<>(); if ((initBits & INIT_BIT_KEY) != 0)
attributes.add("key"); if ((initBits & INIT_BIT_VALUE) != 0) attributes.add("value"); return "Cannot build Entry, some of required attributes are not set " + attributes; } } } /* /********************************************************** /* Unit tests /********************************************************** */ private static final ObjectMapper MAPPER = new ObjectMapper(); @Test public void testImmutablesSimpleDeserialization() throws IOException { Account expected = ImmutableAccount.builder() .id(1L) .name("foo") .build(); Account actual = MAPPER.readValue("{\"id\": 1,\"name\":\"foo\"}", Account.class); assertEquals(expected, actual); } @Test public void testImmutablesSimpleRoundTrip() throws IOException { Account original = ImmutableAccount.builder() .id(1L) .name("foo") .build(); String json = MAPPER.writeValueAsString(original); Account deserialized = MAPPER.readValue(json, Account.class); assertEquals(original, deserialized); } @Test public void testImmutablesSimpleGenericDeserialization() throws IOException { Key<Account> expected = ImmutableKey.<Account>builder() .id(ImmutableAccount.builder() .id(1L) .name("foo") .build()) .build(); Key<Account> actual = MAPPER.readValue( "{\"id\":{\"id\": 1,\"name\":\"foo\"}}", new TypeReference<Key<Account>>() {}); assertEquals(expected, actual); } @Test public void testImmutablesSimpleGenericRoundTrip() throws IOException { Key<Account> original = ImmutableKey.<Account>builder() .id(ImmutableAccount.builder() .id(1L) .name("foo") .build()) .build(); String json = MAPPER.writeValueAsString(original); Key<Account> deserialized = MAPPER.readValue(json, new TypeReference<Key<Account>>() {}); assertEquals(original, deserialized); } @Test public void testImmutablesMultipleTypeParametersDeserialization() throws IOException { Entry<Key<Account>, Account> expected = ImmutableEntry.<Key<Account>, Account>builder() .key(ImmutableKey.<Account>builder() .id(ImmutableAccount.builder() .id(1L) .name("foo") .build()) .build())
.value(ImmutableAccount.builder() .id(2L) .name("bar") .build()) .build(); Entry<Key<Account>, Account> actual = MAPPER.readValue( "{\"key\":{\"id\":{\"id\": 1,\"name\":\"foo\"}},\"value\":{\"id\":2,\"name\":\"bar\"}}", new TypeReference<Entry<Key<Account>, Account>>() {}); assertEquals(expected, actual); } @Test public void testImmutablesMultipleTypeParametersRoundTrip() throws IOException { Entry<Key<Account>, Account> original = ImmutableEntry.<Key<Account>, Account>builder() .key(ImmutableKey.<Account>builder() .id(ImmutableAccount.builder() .id(1L) .name("foo") .build()) .build()) .value(ImmutableAccount.builder() .id(2L) .name("bar") .build()) .build(); String json = MAPPER.writeValueAsString(original); Entry<Key<Account>, Account> deserialized = MAPPER.readValue( json, new TypeReference<Entry<Key<Account>, Account>>() {}); assertEquals(original, deserialized); } }
// NOTE(review): physical line structure collapsed in transit; code kept
// byte-identical, only these comments added. EasyMock-based tests for the
// supplier that resolves image login credentials: it checks the credential
// store first, then the provider-scoped property, then the jclouds-scoped
// fallback, caches any parsed result under the "image" key, and splits a
// "user:password" property value into separate user and password fields.
// The class-level TestNG @Test annotation is what turns each public method
// into a test; no per-method annotations are needed.
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jclouds.compute.config; import static org.easymock.EasyMock.createMock; import static org.easymock.EasyMock.expect; import static org.easymock.EasyMock.replay; import static org.easymock.EasyMock.verify; import static org.testng.Assert.assertEquals; import java.util.Map; import org.jclouds.config.ValueOfConfigurationKeyOrNull; import org.jclouds.domain.Credentials; import org.jclouds.domain.LoginCredentials; import org.testng.annotations.Test; @Test(groups = "unit", testName = "GetLoginForProviderFromPropertiesAndStoreCredentialsOrReturnNullTest") public class GetLoginForProviderFromPropertiesAndStoreCredentialsOrReturnNullTest { public void testWhenCredentialsNotPresentReturnsNull() { @SuppressWarnings("unchecked") Map<String, Credentials> credstore = createMock(Map.class); Credentials expected = null; ValueOfConfigurationKeyOrNull config = createMock(ValueOfConfigurationKeyOrNull.class); expect(credstore.containsKey("image")).andReturn(false); expect(config.apply("provider.image.login-user")).andReturn(null); expect(config.apply("jclouds.image.login-user")).andReturn(null); expect(config.apply("provider.image.authenticate-sudo")).andReturn(null);
expect(config.apply("jclouds.image.authenticate-sudo")).andReturn(null); replay(config); replay(credstore); GetLoginForProviderFromPropertiesAndStoreCredentialsOrReturnNull fn = new GetLoginForProviderFromPropertiesAndStoreCredentialsOrReturnNull( "provider", config, credstore); assertEquals(fn.get(), expected); verify(config); verify(credstore); } public void testWhenCredentialsNotPresentAndProviderPropertyHasUser() { @SuppressWarnings("unchecked") Map<String, Credentials> credstore = createMock(Map.class); Credentials expected = new Credentials("ubuntu", null); ValueOfConfigurationKeyOrNull config = createMock(ValueOfConfigurationKeyOrNull.class); expect(credstore.containsKey("image")).andReturn(false); expect(config.apply("provider.image.login-user")).andReturn("ubuntu"); expect(config.apply("provider.image.authenticate-sudo")).andReturn(null); expect(config.apply("jclouds.image.authenticate-sudo")).andReturn(null); expect(credstore.put("image", expected)).andReturn(null); replay(config); replay(credstore); GetLoginForProviderFromPropertiesAndStoreCredentialsOrReturnNull fn = new GetLoginForProviderFromPropertiesAndStoreCredentialsOrReturnNull( "provider", config, credstore); assertEquals(fn.get(), expected); verify(config); verify(credstore); } public void testWhenCredentialsNotPresentAndJcloudsPropertyHasUser() { @SuppressWarnings("unchecked") Map<String, Credentials> credstore = createMock(Map.class); Credentials expected = new Credentials("ubuntu", null); ValueOfConfigurationKeyOrNull config = createMock(ValueOfConfigurationKeyOrNull.class); expect(credstore.containsKey("image")).andReturn(false); expect(config.apply("provider.image.login-user")).andReturn(null); expect(config.apply("jclouds.image.login-user")).andReturn("ubuntu"); expect(config.apply("provider.image.authenticate-sudo")).andReturn(null); expect(config.apply("jclouds.image.authenticate-sudo")).andReturn(null); expect(credstore.put("image", expected)).andReturn(null); replay(config);
replay(credstore); GetLoginForProviderFromPropertiesAndStoreCredentialsOrReturnNull fn = new GetLoginForProviderFromPropertiesAndStoreCredentialsOrReturnNull( "provider", config, credstore); assertEquals(fn.get(), expected); verify(config); verify(credstore); } public void testWhenCredentialsAlreadyPresentReturnsSame() { @SuppressWarnings("unchecked") Map<String, Credentials> credstore = createMock(Map.class); Credentials expected = new Credentials("root", null); ValueOfConfigurationKeyOrNull config = createMock(ValueOfConfigurationKeyOrNull.class); expect(credstore.containsKey("image")).andReturn(true); expect(credstore.get("image")).andReturn(expected); replay(config); replay(credstore); GetLoginForProviderFromPropertiesAndStoreCredentialsOrReturnNull fn = new GetLoginForProviderFromPropertiesAndStoreCredentialsOrReturnNull( "provider", config, credstore); assertEquals(fn.get(), expected); verify(config); verify(credstore); } public void testWhenCredentialsNotPresentAndProviderPropertyHasUserAndPassword() { @SuppressWarnings("unchecked") Map<String, Credentials> credstore = createMock(Map.class); Credentials expected = new Credentials("ubuntu", "password"); ValueOfConfigurationKeyOrNull config = createMock(ValueOfConfigurationKeyOrNull.class); expect(credstore.containsKey("image")).andReturn(false); expect(config.apply("provider.image.login-user")).andReturn("ubuntu:password"); expect(config.apply("provider.image.authenticate-sudo")).andReturn(null); expect(config.apply("jclouds.image.authenticate-sudo")).andReturn(null); expect(credstore.put("image", expected)).andReturn(null); replay(config); replay(credstore); GetLoginForProviderFromPropertiesAndStoreCredentialsOrReturnNull fn = new GetLoginForProviderFromPropertiesAndStoreCredentialsOrReturnNull( "provider", config, credstore); assertEquals(fn.get(), expected); verify(config); verify(credstore); } public void testWhenCredentialsNotPresentAndJcloudsPropertyHasUserAndPassword() { @SuppressWarnings("unchecked")
Map<String, Credentials> credstore = createMock(Map.class); Credentials expected = new Credentials("ubuntu", "password"); ValueOfConfigurationKeyOrNull config = createMock(ValueOfConfigurationKeyOrNull.class); expect(credstore.containsKey("image")).andReturn(false); expect(config.apply("provider.image.login-user")).andReturn(null); expect(config.apply("jclouds.image.login-user")).andReturn("ubuntu:password"); expect(config.apply("provider.image.authenticate-sudo")).andReturn(null); expect(config.apply("jclouds.image.authenticate-sudo")).andReturn(null); expect(credstore.put("image", expected)).andReturn(null); replay(config); replay(credstore); GetLoginForProviderFromPropertiesAndStoreCredentialsOrReturnNull fn = new GetLoginForProviderFromPropertiesAndStoreCredentialsOrReturnNull( "provider", config, credstore); assertEquals(fn.get(), expected); verify(config); verify(credstore); } public void testWhenCredentialsNotPresentAndJcloudsPropertyHasUserAndPasswordAndSudo() { @SuppressWarnings("unchecked") Map<String, Credentials> credstore = createMock(Map.class); LoginCredentials expected = LoginCredentials.builder().user("ubuntu").password("password").authenticateSudo(true) .build(); ValueOfConfigurationKeyOrNull config = createMock(ValueOfConfigurationKeyOrNull.class); expect(credstore.containsKey("image")).andReturn(false); expect(config.apply("provider.image.login-user")).andReturn(null); expect(config.apply("jclouds.image.login-user")).andReturn("ubuntu:password"); expect(config.apply("provider.image.authenticate-sudo")).andReturn(null); expect(config.apply("jclouds.image.authenticate-sudo")).andReturn("true"); expect(credstore.put("image", expected)).andReturn(null); replay(config); replay(credstore); GetLoginForProviderFromPropertiesAndStoreCredentialsOrReturnNull fn = new GetLoginForProviderFromPropertiesAndStoreCredentialsOrReturnNull( "provider", config, credstore); assertEquals(fn.get(), expected); verify(config); verify(credstore); } public void
testWhenCredentialsNotPresentAndProviderPropertyHasUserAndPasswordAndSudo() { @SuppressWarnings("unchecked") Map<String, Credentials> credstore = createMock(Map.class); LoginCredentials expected = LoginCredentials.builder().user("ubuntu").password("password").authenticateSudo(true) .build(); ValueOfConfigurationKeyOrNull config = createMock(ValueOfConfigurationKeyOrNull.class); expect(credstore.containsKey("image")).andReturn(false); expect(config.apply("provider.image.login-user")).andReturn("ubuntu:password"); expect(config.apply("provider.image.authenticate-sudo")).andReturn("true"); expect(credstore.put("image", expected)).andReturn(null); replay(config); replay(credstore); GetLoginForProviderFromPropertiesAndStoreCredentialsOrReturnNull fn = new GetLoginForProviderFromPropertiesAndStoreCredentialsOrReturnNull( "provider", config, credstore); assertEquals(fn.get(), expected); verify(config); verify(credstore); } }
/*
 * Copyright 2016 SimplifyOps, Inc. (http://simplifyops.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * URLResourceModelSource.java
 *
 * User: Greg Schueler <a href="mailto:greg@dtosolutions.com">greg@dtosolutions.com</a>
 * Created: 7/21/11 4:33 PM
 *
 */
package com.dtolabs.rundeck.core.resources;

import com.dtolabs.rundeck.core.common.*;
import com.dtolabs.rundeck.core.common.impl.URLFileUpdater;
import com.dtolabs.rundeck.core.common.impl.URLFileUpdaterBuilder;
import com.dtolabs.rundeck.core.plugins.configuration.*;
import com.dtolabs.rundeck.core.resources.format.ResourceFormatParser;
import com.dtolabs.rundeck.core.resources.format.ResourceFormatParserException;
import com.dtolabs.rundeck.core.resources.format.UnsupportedFormatException;
import com.dtolabs.rundeck.plugins.util.DescriptionBuilder;
import com.dtolabs.rundeck.plugins.util.PropertyBuilder;
import org.apache.commons.codec.binary.Hex;
import org.apache.log4j.Logger;

import java.io.File;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.*;

/**
 * URLResourceModelSource produces nodes from a URL. The document fetched from the URL is
 * cached on local disk (optionally honoring HTTP cache-revalidation metadata) and parsed
 * with a {@link ResourceFormatParser} chosen either by file extension (for {@code file:}
 * URLs) or by the response MIME type (for {@code http(s):} URLs).
 *
 * @author Greg Schueler <a href="mailto:greg@dtosolutions.com">greg@dtosolutions.com</a>
 */
public class URLResourceModelSource implements ResourceModelSource, Configurable {
    static final Logger logger = Logger.getLogger(URLResourceModelSource.class.getName());
    /** Default request timeout in seconds. */
    public static final int DEFAULT_TIMEOUT = 30;
    final private Framework framework;
    Configuration configuration;
    //local file the fetched document is written to
    private File destinationTempFile;
    //sidecar file holding HTTP cache metadata for the fetched document
    private File destinationCacheData;
    private String tempFileName;
    //test hook: allows injecting a mock HTTP interaction
    URLFileUpdater.httpClientInteraction interaction;

    public URLResourceModelSource(final Framework framework) {
        this.framework = framework;
    }

    /** Only these URL schemes are accepted as a node source. */
    final static HashSet<String> allowedProtocols = new HashSet<String>(Arrays.asList("http", "https", "file"));

    /**
     * Validates that a property value is a well-formed URL using an allowed protocol.
     */
    public static final class URLValidator implements PropertyValidator {
        public boolean isValid(String value) throws ValidationException {
            final URL url;
            try {
                url = new URL(value);
            } catch (MalformedURLException e) {
                throw new ValidationException(e.getMessage());
            }
            //new URL(...) never returns null; only the protocol needs checking here
            if (!allowedProtocols.contains(url.getProtocol().toLowerCase())) {
                throw new ValidationException("url protocol not supported: " + url.getProtocol());
            }
            return true;
        }
    }

    /** Plugin description exposed to the Rundeck configuration UI. */
    public static final Description DESCRIPTION = DescriptionBuilder.builder()
        .name("url")
        .title("URL Source")
        .description("Retrieves a URL containing node definitions in a supported format")
        .property(PropertyBuilder.builder()
                      .string(Configuration.URL)
                      .title("URL")
                      .description("URL for the remote resource model document")
                      .required(true)
                      .validator(new URLValidator())
                      .build()
        )
        .property(PropertyBuilder.builder()
                      .integer(Configuration.TIMEOUT)
                      .title("Timeout")
                      .description("Timeout (in seconds) before requests fail. 0 means no timeout.")
                      .defaultValue(Integer.toString(DEFAULT_TIMEOUT))
                      .build()
        )
        .property(PropertyBuilder.builder()
                      .booleanType(Configuration.CACHE)
                      .title("Cache results")
                      .description("Refresh results only if modified?")
                      .required(true)
                      .defaultValue("true")
                      .build()
        )
        .build();

    /**
     * Typed view over the plugin's {@link Properties} configuration. Mutator methods keep
     * the backing Properties and the parsed fields in sync; {@link #validate()} reports
     * invalid or missing values as {@link ConfigurationException}.
     */
    public static class Configuration {
        public static final String URL = "url";
        public static final String PROJECT = "project";
        public static final String CACHE = "cache";
        public static final String TIMEOUT = "timeout";
        URL nodesUrl;
        String project;
        boolean useCache = true;
        int timeout = DEFAULT_TIMEOUT;
        private final Properties properties;

        Configuration() {
            properties = new Properties();
        }

        Configuration(final Properties configuration) {
            if (null == configuration) {
                throw new NullPointerException("configuration");
            }
            this.properties = configuration;
            configure();
        }

        /**
         * Parse the backing properties into the typed fields. Parse failures are tolerated
         * here (logged or ignored) because {@link #validate()} re-checks and reports them.
         */
        private void configure() {
            if (properties.containsKey(URL)) {
                try {
                    nodesUrl = new URL(properties.getProperty(URL));
                } catch (MalformedURLException e) {
                    //tolerated here; validate() rethrows as ConfigurationException
                    logger.warn("url property is malformed: " + e.getMessage());
                }
            }
            if (properties.containsKey(PROJECT)) {
                project = properties.getProperty(PROJECT);
            }
            if (properties.containsKey(CACHE)) {
                useCache = Boolean.parseBoolean(properties.getProperty(CACHE));
            }
            if (properties.containsKey(TIMEOUT)) {
                try {
                    timeout = Integer.parseInt(properties.getProperty(TIMEOUT));
                } catch (NumberFormatException e) {
                    //tolerated here; validate() rethrows as ConfigurationException
                }
            }
        }

        /**
         * Ensure the configuration is complete and well-formed.
         *
         * @throws ConfigurationException if project or url is missing, the url is
         *                                malformed or uses a disallowed protocol, or the
         *                                timeout is not a number
         */
        void validate() throws ConfigurationException {
            if (null == project) {
                throw new ConfigurationException("project is required");
            }
            if (null == nodesUrl && properties.containsKey(URL)) {
                try {
                    nodesUrl = new URL(properties.getProperty(URL));
                } catch (MalformedURLException e) {
                    throw new ConfigurationException("url is malformed: " + e.getMessage(), e);
                }
            } else if (null == nodesUrl) {
                throw new ConfigurationException("url is required");
            }
            if (null != nodesUrl && !allowedProtocols.contains(nodesUrl.getProtocol().toLowerCase())) {
                throw new ConfigurationException("url protocol not allowed: " + nodesUrl.getProtocol());
            }
            if (properties.containsKey(TIMEOUT)) {
                try {
                    timeout = Integer.parseInt(properties.getProperty(TIMEOUT));
                } catch (NumberFormatException e) {
                    throw new ConfigurationException("timeout is invalid: " + e.getMessage(), e);
                }
            }
        }

        Configuration(final Configuration configuration) {
            this(configuration.getProperties());
        }

        /**
         * Set the source URL. A malformed value is stored in the properties unparsed so
         * that {@link #validate()} reports it as a ConfigurationException.
         */
        public Configuration url(final String url) {
            try {
                this.nodesUrl = new URL(url);
            } catch (MalformedURLException e) {
                //deferred: validate() reports malformed urls
            }
            properties.setProperty(URL, url);
            return this;
        }

        public Configuration project(final String project) {
            this.project = project;
            properties.setProperty(PROJECT, project);
            return this;
        }

        public Configuration cache(final boolean cache) {
            this.useCache = cache;
            properties.setProperty(CACHE, Boolean.toString(cache));
            return this;
        }

        public Configuration timeout(final int timeout) {
            this.timeout = timeout;
            properties.setProperty(TIMEOUT, Integer.toString(timeout));
            return this;
        }

        public static Configuration fromProperties(final Properties configuration) {
            return new Configuration(configuration);
        }

        public static Configuration clone(final Configuration configuration) {
            return fromProperties(configuration.getProperties());
        }

        public static Configuration build() {
            return new Configuration();
        }

        public Properties getProperties() {
            return properties;
        }
    }

    /**
     * Configure this source from plugin properties and prepare the per-project cache file
     * locations under the framework base dir.
     *
     * @throws ConfigurationException if the properties are invalid (see
     *                                {@link Configuration#validate()})
     */
    public void configure(final Properties configuration) throws ConfigurationException {
        this.configuration = new Configuration(configuration);
        this.configuration.validate();
        //cache file name is derived from a digest of the URL so distinct URLs don't collide
        tempFileName = hashURL(this.configuration.nodesUrl.toExternalForm()) + ".temp";
        destinationTempFile = new File(framework.getBaseDir(),
            "var/urlResourceModelSourceCache/" + this.configuration.project + "/" + tempFileName);
        destinationCacheData = new File(framework.getBaseDir(),
            "var/urlResourceModelSourceCache/" + this.configuration.project + "/" + tempFileName + ".cache.properties");
        if (!destinationTempFile.getParentFile().isDirectory() && !destinationTempFile.getParentFile().mkdirs()) {
            logger.warn(
                "Unable to create destination directory: " + destinationTempFile.getParentFile().getAbsolutePath());
        }
    }

    /**
     * Produce a stable hex SHA-1 digest of the URL for use as a cache file name, falling
     * back to {@link String#hashCode()} if SHA-1 is unavailable.
     */
    private String hashURL(final String url) {
        try {
            MessageDigest digest = MessageDigest.getInstance("SHA-1");
            digest.reset();
            digest.update(url.getBytes(StandardCharsets.UTF_8));
            return new String(Hex.encodeHex(digest.digest()));
        } catch (NoSuchAlgorithmException e) {
            //SHA-1 is mandated by the JDK spec; log and fall back just in case
            logger.warn("SHA-1 digest unavailable, falling back to hashCode: " + e.getMessage());
        }
        return Integer.toString(url.hashCode());
    }

    /**
     * Fetch (or revalidate) the document and parse it into a node set.
     * <p>
     * If the fetch fails but a previously cached copy exists, the stale copy is parsed and
     * the failure is only logged. If no usable content exists, an empty node set is
     * returned.
     *
     * @throws ResourceModelSourceException if the fetch fails with no cached fallback, no
     *                                      parser supports the content, or parsing fails
     */
    public INodeSet getNodes() throws ResourceModelSourceException {
        //update from URL if necessary
        final URLFileUpdaterBuilder urlFileUpdaterBuilder = new URLFileUpdaterBuilder()
            .setUrl(configuration.nodesUrl)
            .setAcceptHeader("*/*")
            .setTimeout(configuration.timeout);
        if (configuration.useCache) {
            urlFileUpdaterBuilder
                .setCacheMetadataFile(destinationCacheData)
                .setCachedContent(destinationTempFile)
                .setUseCaching(true);
        }
        final URLFileUpdater updater = urlFileUpdaterBuilder.createURLFileUpdater();
        try {
            if (null != interaction) {
                //allow mock
                updater.setInteraction(interaction);
            }
            UpdateUtils.update(updater, destinationTempFile);
            logger.debug("Updated nodes resources file: " + destinationTempFile);
        } catch (UpdateUtils.UpdateException e) {
            if (!destinationTempFile.isFile() || destinationTempFile.length() < 1) {
                //no cached fallback: fail
                throw new ResourceModelSourceException(
                    "Error requesting URL Resource Model Source: " + configuration.nodesUrl
                    + ": " + e.getMessage(), e);
            } else {
                //stale cached copy exists: log and keep using it
                logger.error(
                    "Error requesting URL Resource Model Source: " + configuration.nodesUrl
                    + ": " + e.getMessage(), e);
            }
        }
        final ResourceFormatParser parser;
        if ("file".equalsIgnoreCase(configuration.nodesUrl.getProtocol())) {
            //file: URLs have no MIME type; choose parser by file extension
            try {
                parser = framework.getResourceFormatParserService().getParserForFileExtension(
                    new File(configuration.nodesUrl.toURI()));
            } catch (UnsupportedFormatException e) {
                throw new ResourceModelSourceException(
                    "Error requesting URL Resource Model Source: " + configuration.nodesUrl
                    + ": No supported format available for file extension", e);
            } catch (URISyntaxException e) {
                throw new ResourceModelSourceException(
                    "Error requesting URL Resource Model Source: " + configuration.nodesUrl
                    + ": " + e.getMessage(), e);
            }
            logger.debug("Determined URL content format from file name: " + configuration.nodesUrl);
        } else {
            //http(s): choose parser by the response content type
            final String mimetype = updater.getContentType();
            try {
                parser = framework.getResourceFormatParserService().getParserForMIMEType(mimetype);
            } catch (UnsupportedFormatException e) {
                throw new ResourceModelSourceException(
                    "Error requesting URL Resource Model Source: " + configuration.nodesUrl
                    + ": Response content type is not supported: " + mimetype, e);
            }
            logger.debug("Determined URL content format from MIME type: " + mimetype);
        }
        if (destinationTempFile.isFile() && destinationTempFile.length() > 0) {
            try {
                return parser.parseDocument(destinationTempFile);
            } catch (ResourceFormatParserException e) {
                throw new ResourceModelSourceException(
                    "Error requesting URL Resource Model Source: " + configuration.nodesUrl
                    + ": Content could not be parsed: " + e.getMessage(), e);
            }
        } else {
            return new NodeSetImpl();
        }
    }

    @Override
    public String toString() {
        return "URLResourceModelSource{" +
               "URL='" + configuration.nodesUrl + '\'' +
               '}';
    }
}
package com.intirix.openmm.server.api.beans;

import org.simpleframework.xml.Default;
import org.simpleframework.xml.Element;

/**
 * Bean describing a movie and its metadata, serialized with SimpleXML.
 * <p>
 * String metadata fields default to {@code ""} and their setters ignore {@code null}
 * so serialized output never contains missing required text. Sorted case-insensitively
 * by display name.
 */
@Default
public class Movie implements Comparable<Movie>, Cloneable {
    private int id;
    private String name;
    private String displayName;
    @Element(required = false)
    private String imdbId;
    @Element(required = false)
    private String rtId;
    @Element(required = false)
    private String year = "";
    @Element(required = false)
    private String releaseDate = "";
    @Element(required = false)
    private String description = "";
    @Element(required = false)
    private String posterUrl = "";
    @Element(required = false)
    private String runtime = "";
    @Element(required = false)
    private String mpaaRating = "";
    @Element(required = false)
    private String rating = "";
    @Element(required = false)
    private String genre = "";
    // @Element(required=false)
    private MediaLink[] trailers = new MediaLink[]{};
    // @Element(required=false)
    private MediaLink[] links = new MediaLink[]{};
    @Element(required = false)
    private String lastWatched = "";

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDisplayName() {
        return displayName;
    }

    public void setDisplayName(String displayName) {
        this.displayName = displayName;
    }

    public String getImdbId() {
        return imdbId;
    }

    public void setImdbId(String imdbId) {
        this.imdbId = imdbId;
    }

    public String getRtId() {
        return rtId;
    }

    public void setRtId(String rtId) {
        this.rtId = rtId;
    }

    public String getYear() {
        return year;
    }

    public void setYear(String year) {
        //null is ignored so the field keeps its "" default
        if (year != null) {
            this.year = year;
        }
    }

    public String getReleaseDate() {
        return releaseDate;
    }

    public void setReleaseDate(String releaseDate) {
        if (releaseDate != null) {
            this.releaseDate = releaseDate;
        }
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        if (description != null) {
            this.description = description;
        }
    }

    public String getPosterUrl() {
        return posterUrl;
    }

    public void setPosterUrl(String posterUrl) {
        if (posterUrl != null) {
            this.posterUrl = posterUrl;
        }
    }

    public String getRuntime() {
        return runtime;
    }

    public void setRuntime(String runtime) {
        if (runtime != null) {
            this.runtime = runtime;
        }
    }

    public String getMpaaRating() {
        return mpaaRating;
    }

    public void setMpaaRating(String mpaaRating) {
        if (mpaaRating != null) {
            this.mpaaRating = mpaaRating;
        }
    }

    public String getRating() {
        return rating;
    }

    public void setRating(String rating) {
        if (rating != null) {
            this.rating = rating;
        }
    }

    public String getGenre() {
        return genre;
    }

    public void setGenre(String genre) {
        if (genre != null) {
            this.genre = genre;
        }
    }

    public MediaLink[] getTrailers() {
        return trailers;
    }

    public void setTrailers(MediaLink[] trailers) {
        this.trailers = trailers;
    }

    public MediaLink[] getLinks() {
        return links;
    }

    public void setLinks(MediaLink[] links) {
        this.links = links;
    }

    public String getLastWatched() {
        return lastWatched;
    }

    //note: unlike the other String setters, null is stored as-is here
    public void setLastWatched(String lastWatched) {
        this.lastWatched = lastWatched;
    }

    /** Orders movies case-insensitively by display name. */
    public int compareTo(Movie arg0) {
        return getDisplayName().compareToIgnoreCase(arg0.getDisplayName());
    }

    /**
     * Deep-enough copy of this movie: every field is copied, and the trailers/links
     * arrays are cloned so the copy does not share array instances with the original
     * (the MediaLink elements themselves are still shared).
     * <p>
     * Fixes the previous implementation, which dropped {@code rtId} and {@code runtime}
     * and shared the array instances.
     */
    @Override
    public Object clone() throws CloneNotSupportedException {
        final Movie movie = new Movie();
        movie.setDescription(getDescription());
        movie.setDisplayName(getDisplayName());
        movie.setGenre(getGenre());
        movie.setId(getId());
        movie.setImdbId(getImdbId());
        movie.setRtId(getRtId());
        movie.setRuntime(getRuntime());
        movie.setLinks(getLinks() == null ? null : getLinks().clone());
        movie.setMpaaRating(getMpaaRating());
        movie.setName(getName());
        movie.setPosterUrl(getPosterUrl());
        movie.setRating(getRating());
        movie.setReleaseDate(getReleaseDate());
        movie.setTrailers(getTrailers() == null ? null : getTrailers().clone());
        movie.setYear(getYear());
        movie.setLastWatched(getLastWatched());
        return movie;
    }
}
/* * Copyright 2017 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.vitess.jdbc; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyMap; import static org.mockito.Matchers.anyString; import static org.powermock.api.mockito.PowerMockito.mock; import static org.powermock.api.mockito.PowerMockito.when; import com.google.common.collect.ImmutableMap; import io.vitess.client.Context; import io.vitess.client.SQLFuture; import io.vitess.client.VTGateConnection; import io.vitess.client.VTSession; import io.vitess.client.cursor.Cursor; import io.vitess.client.cursor.CursorWithError; import io.vitess.mysql.DateTime; import io.vitess.proto.Query; import io.vitess.proto.Vtrpc; import io.vitess.util.Constants; import java.lang.reflect.Field; import java.math.BigDecimal; import java.math.BigInteger; import java.sql.BatchUpdateException; import java.sql.Date; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Time; import java.sql.Timestamp; import java.sql.Types; import java.util.ArrayList; import java.util.Calendar; import java.util.List; import java.util.Map; import java.util.TimeZone; import javax.sql.rowset.serial.SerialClob; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Matchers; import org.mockito.Mockito; import org.powermock.api.mockito.PowerMockito; 
import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; /** * Created by harshit.gangal on 09/02/16. */ @RunWith(PowerMockRunner.class) @PrepareForTest({VTGateConnection.class, Vtrpc.RPCError.class}) public class VitessPreparedStatementTest { private String sqlSelect = "select 1 from test_table"; private String sqlShow = "show tables"; private String sqlUpdate = "update test_table set msg = null"; private String sqlInsert = "insert into test_table(msg) values (?)"; @Test public void testStatementExecute() { VitessConnection mockConn = mock(VitessConnection.class); VitessPreparedStatement preparedStatement; try { preparedStatement = new VitessPreparedStatement(mockConn, sqlShow); preparedStatement.executeQuery(sqlSelect); fail("Should have thrown exception for calling this method"); } catch (SQLException ex) { assertEquals("This method cannot be called using this class object", ex.getMessage()); } try { preparedStatement = new VitessPreparedStatement(mockConn, sqlShow); preparedStatement.executeUpdate(sqlUpdate); fail("Should have thrown exception for calling this method"); } catch (SQLException ex) { assertEquals("This method cannot be called using this class object", ex.getMessage()); } try { preparedStatement = new VitessPreparedStatement(mockConn, sqlShow); preparedStatement.execute(sqlShow); fail("Should have thrown exception for calling this method"); } catch (SQLException ex) { assertEquals("This method cannot be called using this class object", ex.getMessage()); } } @Test public void testExecuteQuery() throws SQLException { VitessConnection mockConn = mock(VitessConnection.class); VTGateConnection mockVtGateConn = mock(VTGateConnection.class); Cursor mockCursor = mock(Cursor.class); SQLFuture mockSqlFutureCursor = mock(SQLFuture.class); when(mockConn.getVtGateConn()).thenReturn(mockVtGateConn); when(mockVtGateConn.execute(any(Context.class), anyString(), anyMap(), any(VTSession.class))). 
thenReturn(mockSqlFutureCursor); when(mockConn.getExecuteType()).thenReturn(Constants.QueryExecuteType.SIMPLE); when(mockConn.isSimpleExecute()).thenReturn(true); when(mockSqlFutureCursor.checkedGet()).thenReturn(mockCursor); VitessPreparedStatement preparedStatement; try { //Empty Sql Statement try { new VitessPreparedStatement(mockConn, ""); fail("Should have thrown exception for empty sql"); } catch (SQLException ex) { assertEquals("SQL statement is not valid", ex.getMessage()); } //show query preparedStatement = new VitessPreparedStatement(mockConn, sqlShow); ResultSet rs = preparedStatement.executeQuery(); assertEquals(-1, preparedStatement.getUpdateCount()); //select on replica with bind variables preparedStatement = new VitessPreparedStatement(mockConn, sqlSelect, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); rs = preparedStatement.executeQuery(); assertEquals(-1, preparedStatement.getUpdateCount()); //select on replica without bind variables preparedStatement = new VitessPreparedStatement(mockConn, sqlSelect, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); rs = preparedStatement.executeQuery(); assertEquals(-1, preparedStatement.getUpdateCount()); //select on master rs = preparedStatement.executeQuery(); assertEquals(-1, preparedStatement.getUpdateCount()); try { //when returned cursor is null when(mockSqlFutureCursor.checkedGet()).thenReturn(null); preparedStatement.executeQuery(); fail("Should have thrown exception for cursor null"); } catch (SQLException ex) { assertEquals("Failed to execute this method", ex.getMessage()); } } catch (SQLException e) { fail("Test failed " + e.getMessage()); } } @Test public void testExecuteQueryWithStream() throws SQLException { VitessConnection mockConn = mock(VitessConnection.class); VTGateConnection mockVtGateConn = mock(VTGateConnection.class); Cursor mockCursor = mock(Cursor.class); SQLFuture mockSqlFutureCursor = mock(SQLFuture.class); when(mockConn.getVtGateConn()).thenReturn(mockVtGateConn); 
when(mockVtGateConn .streamExecute(any(Context.class), anyString(), anyMap(), any(VTSession.class))) .thenReturn(mockCursor); when(mockVtGateConn.execute(any(Context.class), anyString(), anyMap(), any(VTSession.class))) .thenReturn(mockSqlFutureCursor); when(mockSqlFutureCursor.checkedGet()).thenReturn(mockCursor); when(mockConn.getExecuteType()).thenReturn(Constants.QueryExecuteType.STREAM); VitessPreparedStatement preparedStatement; try { //Empty Sql Statement try { new VitessPreparedStatement(mockConn, ""); fail("Should have thrown exception for empty sql"); } catch (SQLException ex) { assertEquals("SQL statement is not valid", ex.getMessage()); } //show query preparedStatement = new VitessPreparedStatement(mockConn, sqlShow); ResultSet rs = preparedStatement.executeQuery(); assertEquals(-1, preparedStatement.getUpdateCount()); //select on replica with bind variables preparedStatement = new VitessPreparedStatement(mockConn, sqlSelect, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); rs = preparedStatement.executeQuery(); assertEquals(-1, preparedStatement.getUpdateCount()); //select on replica without bind variables preparedStatement = new VitessPreparedStatement(mockConn, sqlSelect, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); rs = preparedStatement.executeQuery(); assertEquals(-1, preparedStatement.getUpdateCount()); //select on master rs = preparedStatement.executeQuery(); assertEquals(-1, preparedStatement.getUpdateCount()); try { //when returned cursor is null when(mockVtGateConn .streamExecute(any(Context.class), anyString(), anyMap(), any(VTSession.class))) .thenReturn(null); preparedStatement.executeQuery(); fail("Should have thrown exception for cursor null"); } catch (SQLException ex) { assertEquals("Failed to execute this method", ex.getMessage()); } } catch (SQLException e) { fail("Test failed " + e.getMessage()); } } @Test public void testExecuteUpdate() throws SQLException { VitessConnection mockConn = 
mock(VitessConnection.class); VTGateConnection mockVtGateConn = mock(VTGateConnection.class); Cursor mockCursor = mock(Cursor.class); SQLFuture mockSqlFutureCursor = mock(SQLFuture.class); List<Query.Field> fieldList = mock(ArrayList.class); when(mockConn.getVtGateConn()).thenReturn(mockVtGateConn); when(mockVtGateConn.execute(any(Context.class), anyString(), anyMap(), any(VTSession.class))) .thenReturn(mockSqlFutureCursor); when(mockSqlFutureCursor.checkedGet()).thenReturn(mockCursor); when(mockCursor.getFields()).thenReturn(Query.QueryResult.getDefaultInstance().getFieldsList()); VitessPreparedStatement preparedStatement; try { //executing dml on master preparedStatement = new VitessPreparedStatement(mockConn, sqlUpdate, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); int updateCount = preparedStatement.executeUpdate(); assertEquals(0, updateCount); //tx is null & autoCommit is true when(mockConn.getAutoCommit()).thenReturn(true); preparedStatement = new VitessPreparedStatement(mockConn, sqlUpdate); updateCount = preparedStatement.executeUpdate(); assertEquals(0, updateCount); //cursor fields is not null when(mockCursor.getFields()).thenReturn(fieldList); when(fieldList.isEmpty()).thenReturn(false); try { preparedStatement.executeUpdate(); fail("Should have thrown exception for field not null"); } catch (SQLException ex) { assertEquals("ResultSet generation is not allowed through this method", ex.getMessage()); } //cursor is null when(mockSqlFutureCursor.checkedGet()).thenReturn(null); try { preparedStatement.executeUpdate(); fail("Should have thrown exception for cursor null"); } catch (SQLException ex) { assertEquals("Failed to execute this method", ex.getMessage()); } //read only when(mockConn.isReadOnly()).thenReturn(true); try { preparedStatement.executeUpdate(); fail("Should have thrown exception for read only"); } catch (SQLException ex) { assertEquals(Constants.SQLExceptionMessages.READ_ONLY, ex.getMessage()); } //read only 
when(mockConn.isReadOnly()).thenReturn(true); try { preparedStatement.executeBatch(); fail("Should have thrown exception for read only"); } catch (SQLException ex) { assertEquals(Constants.SQLExceptionMessages.READ_ONLY, ex.getMessage()); } } catch (SQLException e) { fail("Test failed " + e.getMessage()); } } @Test public void testExecute() throws SQLException { VitessConnection mockConn = mock(VitessConnection.class); VTGateConnection mockVtGateConn = mock(VTGateConnection.class); Cursor mockCursor = mock(Cursor.class); SQLFuture mockSqlFutureCursor = mock(SQLFuture.class); List<Query.Field> mockFieldList = PowerMockito.spy(new ArrayList<>()); when(mockConn.getVtGateConn()).thenReturn(mockVtGateConn); when(mockVtGateConn.execute(any(Context.class), anyString(), anyMap(), any(VTSession.class))) .thenReturn(mockSqlFutureCursor); when(mockConn.getExecuteType()).thenReturn(Constants.QueryExecuteType.SIMPLE); when(mockConn.isSimpleExecute()).thenReturn(true); when(mockConn.getAutoCommit()).thenReturn(true); when(mockSqlFutureCursor.checkedGet()).thenReturn(mockCursor); when(mockCursor.getFields()).thenReturn(mockFieldList); VitessPreparedStatement preparedStatement = new VitessPreparedStatement(mockConn, sqlSelect, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); try { int fieldSize = 5; when(mockCursor.getFields()).thenReturn(mockFieldList); PowerMockito.doReturn(fieldSize).when(mockFieldList).size(); PowerMockito.doReturn(false).when(mockFieldList).isEmpty(); boolean hasResultSet = preparedStatement.execute(); Assert.assertTrue(hasResultSet); Assert.assertNotNull(preparedStatement.getResultSet()); preparedStatement = new VitessPreparedStatement(mockConn, sqlShow); hasResultSet = preparedStatement.execute(); Assert.assertTrue(hasResultSet); Assert.assertNotNull(preparedStatement.getResultSet()); int mockUpdateCount = 10; when(mockCursor.getFields()) .thenReturn(Query.QueryResult.getDefaultInstance().getFieldsList()); 
when(mockCursor.getRowsAffected()).thenReturn((long) mockUpdateCount); preparedStatement = new VitessPreparedStatement(mockConn, sqlUpdate); hasResultSet = preparedStatement.execute(); Assert.assertFalse(hasResultSet); Assert.assertNull(preparedStatement.getResultSet()); assertEquals(mockUpdateCount, preparedStatement.getUpdateCount()); //cursor is null when(mockSqlFutureCursor.checkedGet()).thenReturn(null); try { preparedStatement = new VitessPreparedStatement(mockConn, sqlShow); preparedStatement.execute(); fail("Should have thrown exception for cursor null"); } catch (SQLException ex) { assertEquals("Failed to execute this method", ex.getMessage()); } } catch (SQLException e) { fail("Test failed " + e.getMessage()); } } @Test public void testExecuteFetchSizeAsStreaming() throws SQLException { testExecute(5, true, false, true); testExecute(5, false, false, true); testExecute(0, true, true, false); testExecute(0, false, false, true); } private void testExecute(int fetchSize, boolean simpleExecute, boolean shouldRunExecute, boolean shouldRunStreamExecute) throws SQLException { VTGateConnection mockVtGateConn = mock(VTGateConnection.class); VitessConnection mockConn = mock(VitessConnection.class); when(mockConn.isSimpleExecute()).thenReturn(simpleExecute); when(mockConn.getVtGateConn()).thenReturn(mockVtGateConn); Cursor mockCursor = mock(Cursor.class); SQLFuture mockSqlFutureCursor = mock(SQLFuture.class); when(mockSqlFutureCursor.checkedGet()).thenReturn(mockCursor); when(mockVtGateConn.execute(any(Context.class), anyString(), anyMap(), any(VTSession.class))) .thenReturn(mockSqlFutureCursor); when(mockVtGateConn .streamExecute(any(Context.class), anyString(), anyMap(), any(VTSession.class))) .thenReturn(mockCursor); VitessPreparedStatement statement = new VitessPreparedStatement(mockConn, sqlSelect); statement.setFetchSize(fetchSize); statement.executeQuery(); if (shouldRunExecute) { Mockito.verify(mockVtGateConn, Mockito.times(2)) .execute(any(Context.class), 
anyString(), anyMap(), any(VTSession.class)); } if (shouldRunStreamExecute) { Mockito.verify(mockVtGateConn) .streamExecute(any(Context.class), anyString(), anyMap(), any(VTSession.class)); } } @Test public void testGetUpdateCount() throws SQLException { VitessConnection mockConn = mock(VitessConnection.class); VTGateConnection mockVtGateConn = mock(VTGateConnection.class); Cursor mockCursor = mock(Cursor.class); SQLFuture mockSqlFuture = mock(SQLFuture.class); when(mockConn.getVtGateConn()).thenReturn(mockVtGateConn); when(mockVtGateConn.execute(any(Context.class), anyString(), anyMap(), any(VTSession.class))) .thenReturn(mockSqlFuture); when(mockSqlFuture.checkedGet()).thenReturn(mockCursor); when(mockCursor.getFields()).thenReturn(Query.QueryResult.getDefaultInstance().getFieldsList()); VitessPreparedStatement preparedStatement = new VitessPreparedStatement(mockConn, sqlSelect); try { when(mockCursor.getRowsAffected()).thenReturn(10L); int updateCount = preparedStatement.executeUpdate(); assertEquals(10L, updateCount); assertEquals(10L, preparedStatement.getUpdateCount()); // Truncated Update Count when(mockCursor.getRowsAffected()).thenReturn((long) Integer.MAX_VALUE + 10); updateCount = preparedStatement.executeUpdate(); assertEquals(Integer.MAX_VALUE, updateCount); assertEquals(Integer.MAX_VALUE, preparedStatement.getUpdateCount()); when(mockConn.isSimpleExecute()).thenReturn(true); preparedStatement.executeQuery(); assertEquals(-1, preparedStatement.getUpdateCount()); } catch (SQLException e) { fail("Test failed " + e.getMessage()); } } @Test public void testSetParameters() throws Exception { VitessConnection mockConn = mock(VitessConnection.class); Mockito.when(mockConn.getTreatUtilDateAsTimestamp()).thenReturn(true); VitessPreparedStatement preparedStatement = new VitessPreparedStatement(mockConn, sqlSelect); Boolean boolValue = true; Byte byteValue = Byte.MAX_VALUE; Short shortValue = Short.MAX_VALUE; Integer intValue = Integer.MAX_VALUE; Long longValue = 
Long.MAX_VALUE; Float floatValue = Float.MAX_VALUE; Double doubleValue = Double.MAX_VALUE; BigDecimal bigDecimalValue = new BigDecimal(3.14159265358979323846); BigDecimal expectedDecimalValue = new BigDecimal("3.14159"); BigInteger bigIntegerValue = new BigInteger("18446744073709551615"); String stringValue = "vitess"; byte[] bytesValue = stringValue.getBytes(); Date dateValue = new Date(0); // Use a time value that won't go negative after adjusting for time zone. // The java.sql.Time class does not properly format negative times. Time timeValue = new Time(12 * 60 * 60 * 1000); Timestamp timestampValue = new Timestamp(0); preparedStatement.setNull(1, Types.INTEGER); preparedStatement.setBoolean(2, boolValue); preparedStatement.setByte(3, byteValue); preparedStatement.setShort(4, shortValue); preparedStatement.setInt(5, intValue); preparedStatement.setLong(6, longValue); preparedStatement.setFloat(7, floatValue); preparedStatement.setDouble(8, doubleValue); preparedStatement.setBigDecimal(9, bigDecimalValue); preparedStatement.setBigInteger(10, bigIntegerValue); preparedStatement.setString(11, stringValue); preparedStatement.setBytes(12, bytesValue); preparedStatement.setDate(13, dateValue); preparedStatement.setTime(14, timeValue); preparedStatement.setTimestamp(15, timestampValue); preparedStatement.setDate(16, dateValue, Calendar.getInstance(TimeZone.getDefault())); preparedStatement.setTime(17, timeValue, Calendar.getInstance(TimeZone.getDefault())); preparedStatement.setTimestamp(18, timestampValue, Calendar.getInstance(TimeZone.getDefault())); preparedStatement.setObject(19, boolValue); preparedStatement.setObject(20, byteValue); preparedStatement.setObject(21, shortValue); preparedStatement.setObject(22, intValue); preparedStatement.setObject(23, longValue); preparedStatement.setObject(24, floatValue); preparedStatement.setObject(25, doubleValue); preparedStatement.setObject(26, bigDecimalValue); preparedStatement.setObject(27, bigIntegerValue); 
preparedStatement.setObject(28, stringValue); preparedStatement.setObject(29, dateValue); preparedStatement.setObject(30, timeValue); preparedStatement.setObject(31, timestampValue); preparedStatement.setObject(32, 'a'); preparedStatement.setObject(33, null); preparedStatement.setObject(34, boolValue, Types.BOOLEAN, 0); preparedStatement.setObject(35, shortValue, Types.SMALLINT, 0); preparedStatement.setObject(36, longValue, Types.BIGINT, 0); preparedStatement.setObject(37, floatValue, Types.DOUBLE, 2); preparedStatement.setObject(38, doubleValue, Types.DOUBLE, 3); preparedStatement.setObject(39, bigDecimalValue, Types.DECIMAL, 5); preparedStatement.setObject(40, stringValue, Types.VARCHAR, 0); preparedStatement.setObject(41, dateValue, Types.DATE, 0); preparedStatement.setObject(42, timeValue, Types.TIME, 0); preparedStatement.setObject(43, timestampValue, Types.TIMESTAMP, 0); preparedStatement.setClob(44, new SerialClob("clob".toCharArray())); preparedStatement.setObject(45, bytesValue); Field bindVariablesMap = preparedStatement.getClass().getDeclaredField("bindVariables"); bindVariablesMap.setAccessible(true); Map<String, Object> bindVariables = (Map<String, Object>) bindVariablesMap .get(preparedStatement); assertEquals(null, bindVariables.get("v1")); assertEquals(boolValue, bindVariables.get("v2")); assertEquals(byteValue, bindVariables.get("v3")); assertEquals(shortValue, bindVariables.get("v4")); assertEquals(intValue, bindVariables.get("v5")); assertEquals(longValue, bindVariables.get("v6")); assertEquals(floatValue, bindVariables.get("v7")); assertEquals(doubleValue, bindVariables.get("v8")); assertEquals(bigDecimalValue, bindVariables.get("v9")); assertEquals(bigIntegerValue, bindVariables.get("v10")); assertEquals(stringValue, bindVariables.get("v11")); assertEquals(bytesValue, bindVariables.get("v12")); assertEquals(dateValue.toString(), bindVariables.get("v13")); assertEquals(timeValue.toString(), bindVariables.get("v14")); 
assertEquals(timestampValue.toString(), bindVariables.get("v15")); assertEquals(dateValue.toString(), bindVariables.get("v16")); assertEquals(timeValue.toString(), bindVariables.get("v17")); assertEquals(timestampValue.toString(), bindVariables.get("v18")); assertEquals(boolValue, bindVariables.get("v19")); assertEquals(byteValue, bindVariables.get("v20")); assertEquals(shortValue, bindVariables.get("v21")); assertEquals(intValue, bindVariables.get("v22")); assertEquals(longValue, bindVariables.get("v23")); assertEquals(floatValue, bindVariables.get("v24")); assertEquals(doubleValue, bindVariables.get("v25")); assertEquals(bigDecimalValue, bindVariables.get("v26")); assertEquals(bigIntegerValue, bindVariables.get("v27")); assertEquals(stringValue, bindVariables.get("v28")); assertEquals(dateValue.toString(), bindVariables.get("v29")); assertEquals(timeValue.toString(), bindVariables.get("v30")); assertEquals(timestampValue.toString(), bindVariables.get("v31")); assertEquals("a", bindVariables.get("v32")); assertEquals(null, bindVariables.get("v33")); assertEquals(true, bindVariables.get("v34")); assertEquals(shortValue.intValue(), bindVariables.get("v35")); assertEquals(longValue, bindVariables.get("v36")); assertEquals((double) floatValue, (double) bindVariables.get("v37"), 0.1); assertEquals(doubleValue, (double) bindVariables.get("v38"), 0.1); assertEquals(expectedDecimalValue, bindVariables.get("v39")); assertEquals(stringValue, bindVariables.get("v40")); assertEquals(dateValue.toString(), bindVariables.get("v41")); assertEquals(timeValue.toString(), bindVariables.get("v42")); assertEquals(timestampValue.toString(), bindVariables.get("v43")); assertEquals("clob", bindVariables.get("v44")); Assert.assertArrayEquals(bytesValue, (byte[]) bindVariables.get("v45")); preparedStatement.clearParameters(); } @Test public void testTreatUtilDateAsTimestamp() throws Exception { VitessConnection mockConn = mock(VitessConnection.class); VitessPreparedStatement 
preparedStatement = new VitessPreparedStatement(mockConn, sqlSelect); java.util.Date utilDateValue = new java.util.Date(System.currentTimeMillis()); Timestamp timestamp = new Timestamp(utilDateValue.getTime()); try { preparedStatement.setObject(1, utilDateValue); fail("setObject on java.util.Date should have failed with SQLException"); } catch (SQLException e) { Assert.assertTrue(e.getMessage().startsWith(Constants.SQLExceptionMessages.SQL_TYPE_INFER)); } preparedStatement.clearParameters(); Mockito.when(mockConn.getTreatUtilDateAsTimestamp()).thenReturn(true); preparedStatement = new VitessPreparedStatement(mockConn, sqlSelect); preparedStatement.setObject(1, utilDateValue); Field bindVariablesMap = preparedStatement.getClass().getDeclaredField("bindVariables"); bindVariablesMap.setAccessible(true); Map<String, Object> bindVariables = (Map<String, Object>) bindVariablesMap .get(preparedStatement); assertEquals(DateTime.formatTimestamp(timestamp), bindVariables.get("v1")); } @Test public void testAutoGeneratedKeys() throws SQLException { VitessConnection mockConn = mock(VitessConnection.class); VTGateConnection mockVtGateConn = mock(VTGateConnection.class); Cursor mockCursor = mock(Cursor.class); SQLFuture mockSqlFutureCursor = mock(SQLFuture.class); when(mockConn.getVtGateConn()).thenReturn(mockVtGateConn); when(mockVtGateConn.execute(any(Context.class), anyString(), anyMap(), any(VTSession.class))) .thenReturn(mockSqlFutureCursor); when(mockSqlFutureCursor.checkedGet()).thenReturn(mockCursor); when(mockCursor.getFields()).thenReturn(Query.QueryResult.getDefaultInstance().getFieldsList()); try { long expectedFirstGeneratedId = 121; long[] expectedGeneratedIds = {121, 122}; int expectedAffectedRows = 2; when(mockCursor.getInsertId()).thenReturn(expectedFirstGeneratedId); when(mockCursor.getRowsAffected()).thenReturn(Long.valueOf(expectedAffectedRows)); //Executing Insert Statement VitessPreparedStatement preparedStatement = new VitessPreparedStatement(mockConn, 
sqlInsert, Statement.RETURN_GENERATED_KEYS); int updateCount = preparedStatement.executeUpdate(); assertEquals(expectedAffectedRows, updateCount); ResultSet rs = preparedStatement.getGeneratedKeys(); int i = 0; while (rs.next()) { long generatedId = rs.getLong(1); assertEquals(expectedGeneratedIds[i++], generatedId); } } catch (SQLException e) { fail("Test failed " + e.getMessage()); } } @Test public void testAddBatch() throws SQLException { VitessConnection mockConn = mock(VitessConnection.class); VitessPreparedStatement statement = new VitessPreparedStatement(mockConn, sqlInsert); try { statement.addBatch(this.sqlInsert); fail("Should have thrown Exception"); } catch (SQLException ex) { assertEquals(Constants.SQLExceptionMessages.METHOD_NOT_ALLOWED, ex.getMessage()); } statement.setString(1, "string1"); statement.addBatch(); try { Field privateStringField = VitessPreparedStatement.class.getDeclaredField("batchedArgs"); privateStringField.setAccessible(true); assertEquals("string1", (((List<Map<String, Object>>) privateStringField.get(statement)).get(0)).get("v1")); } catch (NoSuchFieldException e) { fail("Private Field should exists: batchedArgs"); } catch (IllegalAccessException e) { fail("Private Field should be accessible: batchedArgs"); } } @Test public void testClearBatch() throws SQLException { VitessConnection mockConn = mock(VitessConnection.class); VitessPreparedStatement statement = new VitessPreparedStatement(mockConn, sqlInsert); statement.setString(1, "string1"); statement.addBatch(); statement.clearBatch(); try { Field privateStringField = VitessPreparedStatement.class.getDeclaredField("batchedArgs"); privateStringField.setAccessible(true); Assert.assertTrue(((List<Map<String, Object>>) privateStringField.get(statement)).isEmpty()); } catch (NoSuchFieldException e) { fail("Private Field should exists: batchedArgs"); } catch (IllegalAccessException e) { fail("Private Field should be accessible: batchedArgs"); } } @Test public void testExecuteBatch() 
throws SQLException { VitessConnection mockConn = mock(VitessConnection.class); VitessPreparedStatement statement = new VitessPreparedStatement(mockConn, sqlInsert); int[] updateCounts = statement.executeBatch(); assertEquals(0, updateCounts.length); VTGateConnection mockVtGateConn = mock(VTGateConnection.class); when(mockConn.getVtGateConn()).thenReturn(mockVtGateConn); when(mockConn.getAutoCommit()).thenReturn(true); SQLFuture mockSqlFutureCursor = mock(SQLFuture.class); when(mockVtGateConn.executeBatch(any(Context.class), Matchers.anyList(), Matchers.anyList(), any(VTSession.class))).thenReturn(mockSqlFutureCursor); List<CursorWithError> mockCursorWithErrorList = new ArrayList<>(); when(mockSqlFutureCursor.checkedGet()).thenReturn(mockCursorWithErrorList); CursorWithError mockCursorWithError1 = mock(CursorWithError.class); when(mockCursorWithError1.getError()).thenReturn(null); when(mockCursorWithError1.getCursor()).thenReturn(mock(Cursor.class)); mockCursorWithErrorList.add(mockCursorWithError1); statement.setString(1, "string1"); statement.addBatch(); updateCounts = statement.executeBatch(); assertEquals(1, updateCounts.length); CursorWithError mockCursorWithError2 = mock(CursorWithError.class); Vtrpc.RPCError rpcError = Vtrpc.RPCError.newBuilder() .setMessage("preparedStatement execute batch error").build(); when(mockCursorWithError2.getError()).thenReturn(rpcError); mockCursorWithErrorList.add(mockCursorWithError2); statement.setString(1, "string1"); statement.addBatch(); statement.setString(1, "string2"); statement.addBatch(); try { statement.executeBatch(); fail("Should have thrown Exception"); } catch (BatchUpdateException ex) { assertEquals(rpcError.toString(), ex.getMessage()); assertEquals(2, ex.getUpdateCounts().length); assertEquals(Statement.EXECUTE_FAILED, ex.getUpdateCounts()[1]); } } @Test public void testStatementCount() throws SQLException { VitessConnection mockConn = mock(VitessConnection.class); Map<String, Integer> testCases = 
ImmutableMap.<String, Integer>builder() .put("select * from foo where a = ?", 1).put("select * from foo where a = ? and b = ?", 2) .put("select * from foo where a = ? and b = \"?\"", 1) .put("select * from foo where a = ? and b = '?'", 1) .put("select * from foo where a = ? and b = `?`", 1) .put("select foo.*, `bar.baz?` from foo, bar where foo.a = ? and bar.b = foo.b", 1) .put("select * from foo where a = ? and b = \"`?`\"", 1) .put("select * from foo where a = ? --and b = ?", 1) .put("select * from foo where a = ? /* and b = ? */ and c = ?", 2) .put("/* leading comment? */ select * from foo where a = ? and b = ?", 2) .put("select * from foo where a = ? and b = ? and c = 'test' and d = ?", 3) .put("select * from foo where a = ? and b = \\`?\\`", 2) // not valid sql but validates escaping .put("select * from foo where a = ? and b = \\?", 1) // not valid sql but validates escaping .put("update foo set a = ?, b = ? where c = 'test' and d = ?", 3).put( "insert into foo (`a`, `b`) values (?, ?), (?, ?) on /* test? */ duplicate key update" + " a = \"?\"", 4).put("delete from foo where a = ? and b = '?'", 1).build(); for (Map.Entry<String, Integer> testCase : testCases.entrySet()) { VitessPreparedStatement statement = new VitessPreparedStatement(mockConn, testCase.getKey()); assertEquals(testCase.getKey(), testCase.getValue().longValue(), statement.getParameterMetaData().getParameterCount()); } } }
package com.fundynamic.d2tm.game.types;

import com.fundynamic.d2tm.game.entities.EntityType;
import com.fundynamic.d2tm.game.entities.entitiesdata.EntitiesData;
import com.fundynamic.d2tm.game.entities.entitiesdata.EntitiesDataReader;
import com.fundynamic.d2tm.game.entities.entitybuilders.EntityBuilderType;
import com.fundynamic.d2tm.math.Coordinate;
import com.fundynamic.d2tm.math.MapCoordinate;
import com.fundynamic.d2tm.math.Vector2D;
import com.fundynamic.d2tm.utils.StringUtils;
import org.newdawn.slick.Image;

import java.util.ArrayList;
import java.util.List;

import static com.fundynamic.d2tm.game.map.Cell.TILE_SIZE;

/**
 * <h1>Overview</h1>
 * This is an object representation of an Entity. The {@link EntitiesData} class contains
 * all objects after reading the rules.ini file. The interpretation of the file and the construction of an {@link EntityData} class
 * is done by the {@link EntitiesDataReader}.
 *
 * <h2>Structures</h2>
 * <p>Example piece of rules.ini file for structures:</p>
 * <pre>
 * [STRUCTURES]
 *
 * [STRUCTURES/CONSTYARD]
 * image=structures/2x2_constyard.png
 * hitPoints=2000
 * width=64
 * height=64
 * sight=5
 * explosion=BOOM
 *
 * </pre>
 * <p>The structure has a reference to EXPLOSIONS which is implemented by (#{@link com.fundynamic.d2tm.game.entities.particle.Particle}</p>
 *
 * <p>NOTE: fields are intentionally public and mutable — this class is populated field-by-field
 * by the {@link EntitiesDataReader} while parsing rules.ini.</p>
 */
public class EntityData {

    // Sentinel value meaning "no id configured" for weaponId, explosionId and onPlacementSpawnUnitId.
    public static final String UNKNOWN = "UNKNOWN";

    /**
     * the name used in the INI file (ie [QUAD] without [])
     */
    public String name;

    // Kind of entity it reflects (UNIT, STRUCTURE, PROJECTILE, PARTICLE, SUPERPOWER)
    public EntityType type;

    // Build related
    public EntityBuilderType entityBuilderType = EntityBuilderType.NONE;
    public float buildTimeInSeconds = 1.0F;
    public float buildRange = 0F;
    public int buildCost = -1;          // cost to build this; -1 means "not set"
    public Image buildIcon;             // build icon
    public String buildList = "";       // comma separated list of entity keys this entity can build

    public Image image;                 // base image
    public Image barrelImage;           // barrelImage (top image)

    private int facings;                // number of rotation facings; 0 means "no facings"
    private int width;                  // in pixels
    private int height;                 // in pixels
    private int widthInCells;           // in cells, derived from pixels
    private int heightInCells;          // in cells, derived from pixels

    public int maxAscensionHeight;      // in pixels, how high a projectile can ascend when 'launched'
    public float startToDescendPercentage;      // normalised value (between 0 and 1.0), when should descend be initiated?
    public float maxAscensionAtFlightPercentage; // normalised value (between 0 and 1.0), when should the projectile be at maxAscensionHeight during flight?

    public int sight;

    public float moveSpeed;     // the speed a unit moves: value is pixels in seconds.
    public float turnSpeed;     // the speed a unit turns: value is facing angles in seconds. < 1 means the value is times per second
    public float turnSpeedCannon; // the speed a unit's barrel turns: value is facing angles in seconds. < 1 means the value is times per second

    public float attackRate;    // the speed a unit attacks: < 1 means the value is times per second
    public float attackRange;   // the range for a unit to attack in pixels

    public String weaponId = UNKNOWN;
    public int damage;

    public int hitPoints;       // initial hitPoints when spawned

    public String explosionId = UNKNOWN;

    public float animationSpeed; // in frames per second, for animating

    public String key;          // key used in HashMap

    public boolean recolor;     // if 'true' then the particle will be recolored (into team color) before spawned

    // for turning: degrees per facing (360/facings) and half of it; -1 until facings are set
    private float chop = -1f;
    private float halfChop = -1f;

    public boolean hasMoveAnimation; // if true, entity can have walking animation and what not

    public SoundData soundData = null; // for playing sound if required

    public String onPlacementSpawnUnitId = UNKNOWN; // If given, upon placement of this Entity (structure) it will spawn this Unit ID next to it

    // Resource gathering related
    public float depositSpeed;      // seconds it takes to deposit 'harvestCapacity'
    public float harvestSpeed;      // seconds it takes to 'harvestCapacity' - given the unit would be able to harvest all on one cell
    public int harvestCapacity;     // total amount of resources==credits this harvester can contain

    public boolean isHarvester;     // if true, entity will execute harvesting logic, seeking spice, harvesting etc
    public boolean isRefinery;      // if true, entity will be something where spice can be delivered

    // power resource related
    public int powerConsumption;    // the amount of power this entity consumes when at 100%
    public int powerProduction;     // the amount of power this entity produces when at 100%
    public float minimumPowerProductionPercentage = 0.25f; // how much an entity will produce (in %) given it has one 1 hp

    public EntityData() {
    }

    /**
     * Convenience constructor; note that width/height are set via the setters so the
     * cell-derived dimensions are computed (requires type to be set first, which it is here).
     */
    public EntityData(EntityType entityType, int width, int height, int sight) {
        this.type = entityType;
        setWidth(width);
        setHeight(height);
        this.sight = sight;
    }

    /**
     * Returns the first (top-left) frame of the sprite sheet, sized width x height.
     * NOTE(review): throws NullPointerException when image has not been set — confirm callers guarantee it.
     */
    public Image getFirstImage() {
        return image.getSubImage(0, 0, width, height);
    }

    /**
     * Sets pixel width and derives widthInCells from it.
     *
     * @throws IllegalStateException when type has not been set yet (needed to decide cell sizing)
     */
    public void setWidth(int width) {
        this.width = width;
        if (this.type == null) throw new IllegalStateException("You can only set width after you have set type");
        // units are always 1 cell in height (for now)
        if (this.type == EntityType.UNIT) {
            widthInCells = 1;
        } else {
            widthInCells = (int) Math.ceil((float) width / TILE_SIZE);
        }
    }

    /**
     * Sets pixel height and derives heightInCells from it.
     *
     * @throws IllegalStateException when type has not been set yet (needed to decide cell sizing)
     */
    public void setHeight(int height) {
        this.height = height;
        if (this.type == null) throw new IllegalStateException("You can only set height after you have set type");
        // units are always 1 cell in height (for now)
        if (this.type == EntityType.UNIT) {
            heightInCells = 1;
        } else {
            heightInCells = (int) Math.ceil((float) height / TILE_SIZE);
        }
    }

    public int getWidth() {
        return width;
    }

    public int getHeight() {
        return height;
    }

    public int getWidthInCells() {
        return widthInCells;
    }

    public int getHeightInCells() {
        return heightInCells;
    }

    // NOTE: intentionally only includes a subset of the fields.
    @Override
    public String toString() {
        return "EntityData{" +
                "chop=" + chop +
                ", halfChop=" + halfChop +
                ", type=" + type +
                ", image=" + image +
                ", barrelImage=" + barrelImage +
                ", width=" + width +
                ", height=" + height +
                ", widthInCells=" + widthInCells +
                ", heightInCells=" + heightInCells +
                ", sight=" + sight +
                ", moveSpeed=" + moveSpeed +
                ", turnSpeed=" + turnSpeed +
                ", turnSpeedCannon=" + turnSpeedCannon +
                ", attackRate=" + attackRate +
                ", attackRange=" + attackRange +
                ", hitPoints=" + hitPoints +
                ", facings=" + facings +
                ", damage=" + damage +
                ", explosionId='" + explosionId + '\'' +
                ", weaponId='" + weaponId + '\'' +
                ", animationSpeed=" + animationSpeed +
                ", key='" + key + '\'' +
                ", recolor=" + recolor +
                '}';
    }

    public boolean hasFacings() {
        return facings > 0;
    }

    /**
     * Stores the number of facings and pre-computes the degrees-per-facing (chop)
     * and its half, used for snapping an angle to the nearest facing.
     */
    public void setFacingsAndCalculateChops(int facings) {
        this.facings = facings;
        this.chop = 360F / facings;
        this.halfChop = chop / 2F;
    }

    public float getChop() {
        return chop;
    }

    public int getFacings() {
        return facings;
    }

    public boolean hasExplosionId() {
        return !UNKNOWN.equals(explosionId);
    }

    public boolean hasWeaponId() {
        return !UNKNOWN.equals(weaponId);
    }

    public boolean hasOnPlacementSpawnUnitId() {
        return !UNKNOWN.equals(onPlacementSpawnUnitId);
    }

    public boolean hasSound() {
        return soundData != null;
    }

    // Lookup keys: weapons are PROJECTILE entities, explosions are PARTICLE entities.
    public String getWeaponIdKey() {
        return constructKey(EntityType.PROJECTILE, weaponId);
    }

    public String getExplosionIdKey() {
        return constructKey(EntityType.PARTICLE, explosionId);
    }

    /**
     * Builds the HashMap key for an entity: "&lt;TYPE&gt;-&lt;id&gt;".
     */
    public static String constructKey(EntityType entityType, String id) {
        return entityType.toString() + "-" + id;
    }

    public float getRelativeDepositSpeed(float deltaInSeconds) {
        return getRelativeSpeed(getDepositSpeed(), deltaInSeconds);
    }

    // Credits deposited per second. NOTE(review): divides by depositSpeed — a value of 0
    // yields Infinity; presumably rules.ini guarantees a positive value — confirm.
    public float getDepositSpeed() {
        return harvestCapacity / depositSpeed;
    }

    /**
     * This takes time into account as well. This makes the distance of moveSpeed equivalent to 1 second.
     *
     * @param deltaInSeconds
     * @return
     */
    public float getRelativeMoveSpeed(float deltaInSeconds) {
        return getRelativeSpeed(moveSpeed, deltaInSeconds);
    }

    public float getRelativeHarvestSpeed(float deltaInSeconds) {
        return getRelativeSpeed(getHarvestSpeed(), deltaInSeconds);
    }

    /**
     * Capacity / harvestSpeed == amount to harvest in one second.
     * Do not use this, but use {@link #getRelativeHarvestSpeed} instead
     * @return
     */
    public float getHarvestSpeed() {
        return harvestCapacity / harvestSpeed;
    }

    /**
     * See @link getRelativeMoveSpeed
     * @param deltaInSeconds
     * @return
     */
    public float getRelativeTurnSpeed(float deltaInSeconds) {
        return getRelativeSpeed(turnSpeed, deltaInSeconds);
    }

    // Scales a per-second speed to the elapsed frame time.
    public static float getRelativeSpeed(float speed, float deltaInSeconds) {
        return speed * deltaInSeconds;
    }

    public float getRelativeAttackRate(float deltaInSeconds) {
        return getRelativeSpeed(attackRate, deltaInSeconds);
    }

    public boolean isTypeStructure() {
        return EntityType.STRUCTURE.equals(this.type);
    }

    public boolean isTypeUnit() {
        return EntityType.UNIT.equals(this.type);
    }

    public boolean isTypeParticle() {
        return EntityType.PARTICLE.equals(this.type);
    }

    public boolean isTypeSuperPower() {
        return EntityType.SUPERPOWER.equals(this.type);
    }

    public boolean isTypeProjectile() {
        return EntityType.PROJECTILE.equals(this.type);
    }

    /**
     * Given a topLeftX and topLeftY coordinate, calculate all cells that are being occupied by this
     * entity and return that as a list of coordinates. These coordinates are top-left coordinates of cells.
     *
     * @return
     */
    public List<MapCoordinate> getAllCellsAsCoordinates(Coordinate coordinate) {
        List<MapCoordinate> result = new ArrayList<>(widthInCells * heightInCells);
        for (int x = 0; x < widthInCells; x++) {
            for (int y = 0; y < heightInCells; y++) {
                int vecX = coordinate.getXAsInt() + (x * TILE_SIZE);
                int vecY = coordinate.getYAsInt() + (y * TILE_SIZE);
                result.add(Coordinate.create(vecX, vecY).toMapCoordinate());
            }
        }
        return result;
    }

    /**
     * Given a topLeftX and topLeftY coordinate, calculate all cells that are being occupied by this
     * entity and return that as a list of coordinates.
     *
     * The coordinates are corrected to be centered within a cell.
     *
     * @return
     */
    public List<Coordinate> getAllCellsAsCenteredCoordinates(Coordinate coordinate) {
        List<MapCoordinate> result = getAllCellsAsCoordinates(coordinate);
        List<Coordinate> centered = new ArrayList<>(result.size());
        Vector2D halfCell = Vector2D.create(TILE_SIZE / 2, TILE_SIZE / 2);
        for (MapCoordinate resultCoordinate : result) {
            centered.add(resultCoordinate.toCoordinate().add(halfCell));
        }
        return centered;
    }

    // Half of the pixel dimensions (integer division).
    public Vector2D halfDimensions() {
        return Vector2D.create(width / 2, height / 2);
    }

    // Splits the comma separated buildList into the individual entity keys.
    public List<String> getEntityDataKeysToBuild() {
        return StringUtils.splitLenientToList(buildList, ",");
    }

    // IDE-generated field-by-field equality over all configuration fields.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        EntityData that = (EntityData) o;

        if (Float.compare(that.buildTimeInSeconds, buildTimeInSeconds) != 0) return false;
        if (Float.compare(that.buildRange, buildRange) != 0) return false;
        if (buildCost != that.buildCost) return false;
        if (facings != that.facings) return false;
        if (width != that.width) return false;
        if (height != that.height) return false;
        if (widthInCells != that.widthInCells) return false;
        if (heightInCells != that.heightInCells) return false;
        if (maxAscensionHeight != that.maxAscensionHeight) return false;
        if (Float.compare(that.startToDescendPercentage, startToDescendPercentage) != 0) return false;
        if (Float.compare(that.maxAscensionAtFlightPercentage, maxAscensionAtFlightPercentage) != 0) return false;
        if (sight != that.sight) return false;
        if (Float.compare(that.moveSpeed, moveSpeed) != 0) return false;
        if (Float.compare(that.turnSpeed, turnSpeed) != 0) return false;
        if (Float.compare(that.turnSpeedCannon, turnSpeedCannon) != 0) return false;
        if (Float.compare(that.attackRate, attackRate) != 0) return false;
        if (Float.compare(that.attackRange, attackRange) != 0) return false;
        if (damage != that.damage) return false;
        if (hitPoints != that.hitPoints) return false;
        if (Float.compare(that.animationSpeed, animationSpeed) != 0) return false;
        if (recolor != that.recolor) return false;
        if (Float.compare(that.chop, chop) != 0) return false;
        if (Float.compare(that.halfChop, halfChop) != 0) return false;
        if (hasMoveAnimation != that.hasMoveAnimation) return false;
        if (Float.compare(that.depositSpeed, depositSpeed) != 0) return false;
        if (Float.compare(that.harvestSpeed, harvestSpeed) != 0) return false;
        if (harvestCapacity != that.harvestCapacity) return false;
        if (isHarvester != that.isHarvester) return false;
        if (isRefinery != that.isRefinery) return false;
        if (name != null ? !name.equals(that.name) : that.name != null) return false;
        if (type != that.type) return false;
        if (entityBuilderType != that.entityBuilderType) return false;
        if (buildIcon != null ? !buildIcon.equals(that.buildIcon) : that.buildIcon != null) return false;
        if (buildList != null ? !buildList.equals(that.buildList) : that.buildList != null) return false;
        if (image != null ? !image.equals(that.image) : that.image != null) return false;
        if (barrelImage != null ? !barrelImage.equals(that.barrelImage) : that.barrelImage != null) return false;
        if (weaponId != null ? !weaponId.equals(that.weaponId) : that.weaponId != null) return false;
        if (explosionId != null ? !explosionId.equals(that.explosionId) : that.explosionId != null) return false;
        if (key != null ? !key.equals(that.key) : that.key != null) return false;
        if (soundData != null ? !soundData.equals(that.soundData) : that.soundData != null) return false;
        return onPlacementSpawnUnitId != null ? onPlacementSpawnUnitId.equals(that.onPlacementSpawnUnitId) : that.onPlacementSpawnUnitId == null;
    }

    // IDE-generated hash over the same fields as equals (keeps the equals/hashCode contract).
    @Override
    public int hashCode() {
        int result = name != null ? name.hashCode() : 0;
        result = 31 * result + (type != null ? type.hashCode() : 0);
        result = 31 * result + (entityBuilderType != null ? entityBuilderType.hashCode() : 0);
        result = 31 * result + (buildTimeInSeconds != +0.0f ? Float.floatToIntBits(buildTimeInSeconds) : 0);
        result = 31 * result + (buildRange != +0.0f ? Float.floatToIntBits(buildRange) : 0);
        result = 31 * result + buildCost;
        result = 31 * result + (buildIcon != null ? buildIcon.hashCode() : 0);
        result = 31 * result + (buildList != null ? buildList.hashCode() : 0);
        result = 31 * result + (image != null ? image.hashCode() : 0);
        result = 31 * result + (barrelImage != null ? barrelImage.hashCode() : 0);
        result = 31 * result + facings;
        result = 31 * result + width;
        result = 31 * result + height;
        result = 31 * result + widthInCells;
        result = 31 * result + heightInCells;
        result = 31 * result + maxAscensionHeight;
        result = 31 * result + (startToDescendPercentage != +0.0f ? Float.floatToIntBits(startToDescendPercentage) : 0);
        result = 31 * result + (maxAscensionAtFlightPercentage != +0.0f ? Float.floatToIntBits(maxAscensionAtFlightPercentage) : 0);
        result = 31 * result + sight;
        result = 31 * result + (moveSpeed != +0.0f ? Float.floatToIntBits(moveSpeed) : 0);
        result = 31 * result + (turnSpeed != +0.0f ? Float.floatToIntBits(turnSpeed) : 0);
        result = 31 * result + (turnSpeedCannon != +0.0f ? Float.floatToIntBits(turnSpeedCannon) : 0);
        result = 31 * result + (attackRate != +0.0f ? Float.floatToIntBits(attackRate) : 0);
        result = 31 * result + (attackRange != +0.0f ? Float.floatToIntBits(attackRange) : 0);
        result = 31 * result + (weaponId != null ? weaponId.hashCode() : 0);
        result = 31 * result + damage;
        result = 31 * result + hitPoints;
        result = 31 * result + (explosionId != null ? explosionId.hashCode() : 0);
        result = 31 * result + (animationSpeed != +0.0f ? Float.floatToIntBits(animationSpeed) : 0);
        result = 31 * result + (key != null ? key.hashCode() : 0);
        result = 31 * result + (recolor ? 1 : 0);
        result = 31 * result + (chop != +0.0f ? Float.floatToIntBits(chop) : 0);
        result = 31 * result + (halfChop != +0.0f ? Float.floatToIntBits(halfChop) : 0);
        result = 31 * result + (hasMoveAnimation ? 1 : 0);
        result = 31 * result + (soundData != null ? soundData.hashCode() : 0);
        result = 31 * result + (depositSpeed != +0.0f ? Float.floatToIntBits(depositSpeed) : 0);
        result = 31 * result + (harvestSpeed != +0.0f ? Float.floatToIntBits(harvestSpeed) : 0);
        result = 31 * result + harvestCapacity;
        result = 31 * result + (isHarvester ? 1 : 0);
        result = 31 * result + (isRefinery ? 1 : 0);
        result = 31 * result + (onPlacementSpawnUnitId != null ? onPlacementSpawnUnitId.hashCode() : 0);
        return result;
    }

    public int getPowerProductionBasedOnHitpoints(float actualHitpoints) {
        return getProductionBasedOnHitPoints(actualHitpoints, this.powerProduction, this.minimumPowerProductionPercentage);
    }

    /**
     * Can be used for calculating the amount of 'repair speed', deposit speed, power generation and more.
     * This function basically calculates how much (percentage) the entity is damaged, and based on that calculates
     * the production of the given "anything" (power, deposit, etc). It takes into consideration the minimum % that
     * always should be outputted.
     *
     * Ie, if you want deposit speed based on HP, but never < 50% of its original speed. Provide 0.5F as minimumProductionPercentage
     *
     * @param actualHitpoints
     * @param productionOfAnything
     * @param minimumProductionPercentage
     * @return
     */
    public int getProductionBasedOnHitPoints(float actualHitpoints, int productionOfAnything, float minimumProductionPercentage) {
        float remaining = 1.0f - minimumProductionPercentage;
        float damageFactor = (actualHitpoints / (float) hitPoints);
        float powerProductionFactor = minimumProductionPercentage + (damageFactor * remaining);
        return (int) Math.ceil(powerProductionFactor * productionOfAnything);
    }

    public boolean producesPower() {
        return powerProduction > 0;
    }

    public boolean consumesPower() {
        return powerConsumption > 0;
    }

}
package com.mauriciotogneri.apply.compiler.lexical; import com.mauriciotogneri.apply.compiler.lexical.base.Position; import com.mauriciotogneri.apply.compiler.lexical.tokens.arithmetic.ArithmeticAdditionToken; import com.mauriciotogneri.apply.compiler.lexical.tokens.arithmetic.ArithmeticDivisionToken; import com.mauriciotogneri.apply.compiler.lexical.tokens.arithmetic.ArithmeticModuleToken; import com.mauriciotogneri.apply.compiler.lexical.tokens.arithmetic.ArithmeticMultiplicationToken; import com.mauriciotogneri.apply.compiler.lexical.tokens.arithmetic.ArithmeticPowerToken; import com.mauriciotogneri.apply.compiler.lexical.tokens.arithmetic.ArithmeticSubtractionToken; import com.mauriciotogneri.apply.compiler.lexical.tokens.comparison.ComparisonEqualToken; import com.mauriciotogneri.apply.compiler.lexical.tokens.comparison.ComparisonGreaterEqualToken; import com.mauriciotogneri.apply.compiler.lexical.tokens.comparison.ComparisonGreaterToken; import com.mauriciotogneri.apply.compiler.lexical.tokens.comparison.ComparisonLessEqualToken; import com.mauriciotogneri.apply.compiler.lexical.tokens.comparison.ComparisonLessToken; import com.mauriciotogneri.apply.compiler.lexical.tokens.comparison.ComparisonNotEqualToken; import com.mauriciotogneri.apply.compiler.lexical.tokens.conditional.ConditionalToken; import com.mauriciotogneri.apply.compiler.lexical.tokens.logic.LogicAndToken; import com.mauriciotogneri.apply.compiler.lexical.tokens.logic.LogicNotToken; import com.mauriciotogneri.apply.compiler.lexical.tokens.logic.LogicOrToken; import com.mauriciotogneri.apply.compiler.lexical.tokens.special.AssignmentToken; import com.mauriciotogneri.apply.compiler.lexical.tokens.special.TypeOfToken; import com.mauriciotogneri.apply.compiler.lexical.tokens.special.TypeReturnToken; public abstract class Token implements Position { private final Lexeme lexeme; public Token(Lexeme lexeme) { this.lexeme = lexeme; } public boolean isSymbol() { return false; } public boolean 
isFunctionDef() { return false; } public boolean isOpenParenthesis() { return false; } public boolean isCloseParenthesis() { return false; } public boolean isNumber() { return false; } public boolean isBoolean() { return false; } public boolean isArithmetic() { return false; } public boolean isLogic() { return false; } public boolean isComparison() { return false; } public boolean isComma() { return false; } public boolean isIf() { return false; } public boolean isIfElse() { return false; } public boolean isElse() { return false; } public boolean isConditional() { return isIf() || isIfElse() || isElse(); } public boolean isNegation() { return false; } public boolean isEndIf() { return false; } public boolean isTypeOf() { return false; } public boolean isAssignment() { return false; } public boolean isOperator() { return isArithmetic() || isLogic() || isComparison() || isConditional() || isTypeOf() || isAssignment(); } public boolean isNewLine() { return false; } public boolean hasHigherPreference(Token token) { int thisPrecedence = precedence(); int tokenPrecedence = token.precedence(); return (thisPrecedence < tokenPrecedence) || ((thisPrecedence == tokenPrecedence) && (isLeftAssociative())); } private boolean isLeftAssociative() { return (this instanceof ArithmeticPowerToken) || (this instanceof LogicNotToken); } // 1 () [] {} . Function call, scope, array/member access // 2 ! ^ Most unary operators, sizeof and type casts (right to left) // 3 * / % Multiplication, division, modulo // 4 + - Addition and subtraction // 5 < <= > >= Comparisons: less-than, ... 
// 6 == != Comparisons: equal and not equal // 7 & Logical AND // 8 | Logical OR // 9 if elsif else end Conditional expression (ternary) // 10 = Assignment operators (right to left) // 11 , Comma operator private int precedence() { if ((this instanceof TypeOfToken) || (this instanceof TypeReturnToken)) { return 1; } else if ((this instanceof ArithmeticPowerToken) || (this instanceof LogicNotToken)) { return 2; } else if (this instanceof ArithmeticModuleToken) { return 3; } else if (this instanceof ArithmeticDivisionToken) { return 3; } else if (this instanceof ArithmeticMultiplicationToken) { return 3; } else if (this instanceof ArithmeticSubtractionToken) { return 4; } else if (this instanceof ArithmeticAdditionToken) { return 4; } else if ((this instanceof ComparisonLessToken) || (this instanceof ComparisonLessEqualToken) || (this instanceof ComparisonGreaterToken) || (this instanceof ComparisonGreaterEqualToken)) { return 5; } else if ((this instanceof ComparisonEqualToken) || (this instanceof ComparisonNotEqualToken)) { return 6; } else if (this instanceof LogicAndToken) { return 7; } else if (this instanceof LogicOrToken) { return 8; } else if (this instanceof ConditionalToken) { return 9; } else if (this instanceof AssignmentToken) { return 10; } else { throw new RuntimeException(); } } public String lexeme() { return lexeme.toString(); } @Override public int line() { return lexeme.line(); } @Override public int column() { return lexeme.column(); } @Override public String toString() { return lexeme.toString(); } }
package org.noetl.aws;

import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduceClient;
import com.amazonaws.services.elasticmapreduce.model.BootstrapActionConfig;
import com.amazonaws.services.elasticmapreduce.model.InstanceGroupConfig;
import com.amazonaws.services.elasticmapreduce.model.InstanceRoleType;
import com.amazonaws.services.elasticmapreduce.model.JobFlowInstancesConfig;
import com.amazonaws.services.elasticmapreduce.model.MarketType;
import com.amazonaws.services.elasticmapreduce.model.RunJobFlowRequest;
import com.amazonaws.services.elasticmapreduce.model.RunJobFlowResult;
import com.amazonaws.services.elasticmapreduce.model.ScriptBootstrapActionConfig;
import com.amazonaws.services.elasticmapreduce.model.StepConfig;
import com.amazonaws.services.elasticmapreduce.model.SupportedProductConfig;
import com.amazonaws.services.elasticmapreduce.util.StepFactory;
import org.noetl.automation.services.INotificationService;
import org.noetl.pojos.clusterConfigs.BootStrapConf;
import org.noetl.pojos.clusterConfigs.ClusterConf;
import org.noetl.pojos.clusterConfigs.ClusterConfJson;
import org.noetl.pojos.clusterConfigs.ClusterNodeConf;
import org.noetl.pojos.clusterConfigs.InstanceTypeConf;
import org.noetl.pojos.clusterConfigs.StepConfigConf;
import org.noetl.utils.GeneralUtils;
import org.apache.log4j.Logger;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

/**
 * Builds and launches an AWS EMR cluster from a {@link ClusterConfJson}
 * configuration: assembles the {@link RunJobFlowRequest} (steps, bootstrap
 * actions, products, master/core instance groups, optional spot bidding)
 * and submits it through the EMR client.
 */
public class EMRClusterBuilder {
  private final static Logger logger = Logger.getLogger(EMRClusterBuilder.class);

  private final AmazonElasticMapReduceClient awsEMRClient;
  // Used to alert operators when spot-price bidding fails.
  private final INotificationService notifier;
  private final ClusterConfJson clusterConfJson;

  public EMRClusterBuilder(AmazonElasticMapReduceClient awsEMRClient,
                           INotificationService notifier,
                           ClusterConfJson clusterConfJson) {
    this.awsEMRClient = awsEMRClient;
    this.notifier = notifier;
    this.clusterConfJson = clusterConfJson;
  }

  /**
   * Submits the configured job flow to EMR.
   *
   * @return the job-flow id of the newly started cluster
   */
  public String build() {
    RunJobFlowResult result = awsEMRClient.runJobFlow(configureRequest());
    String jobFlowId = result.getJobFlowId();
    logger.info("Starting cluster with id:" + jobFlowId);
    return jobFlowId;
  }

  /** Assembles the full {@link RunJobFlowRequest} from the cluster configuration. */
  private RunJobFlowRequest configureRequest() {
    logger.info("Start configuring the cluster...");
    ClusterConf clusterConf = clusterConfJson.getCluster();

    RunJobFlowRequest request = new RunJobFlowRequest()
      .withName(clusterConf.getName())
      .withAmiVersion(clusterConf.getVersion())
      .withSteps(configureStepConfigs(clusterConf.getStepConfigs()))
      .withNewSupportedProducts(configureProducts(clusterConf.getInstalls()))
      .withBootstrapActions(configureBootStraps(clusterConf.getBootStraps()))
      .withLogUri(clusterConf.getLogURI())
      .withServiceRole(clusterConf.getServiceRole())
      .withJobFlowRole(clusterConf.getJobFlowRole())
      .withInstances(
        new JobFlowInstancesConfig()
          .withInstanceGroups(configureClusterNodes(clusterConf.getMasterNode(), clusterConf.getCoreNode()))
          // Keep the cluster alive after steps finish so it can accept more work.
          .withKeepJobFlowAliveWhenNoSteps(true)
          .withEc2KeyName(clusterConfJson.getKey())
          .withEc2SubnetId(clusterConf.getSubnet())
      );
    return request;
  }

  /** Maps each configured bootstrap script to an EMR {@link BootstrapActionConfig}. */
  private List<BootstrapActionConfig> configureBootStraps(List<BootStrapConf> bootStrapConfs) {
    logger.info("Configuring boot straps...");
    List<BootstrapActionConfig> bootStraps = new ArrayList<>();
    for (BootStrapConf bootStrapConf : bootStrapConfs) {
      BootstrapActionConfig config = new BootstrapActionConfig()
        .withName(bootStrapConf.getName())
        .withScriptBootstrapAction(new ScriptBootstrapActionConfig()
          .withPath(bootStrapConf.getScript()));
      bootStraps.add(config);
    }
    return bootStraps;
  }

  /**
   * Builds the step list. Currently only the mandatory debugging step is
   * supported; any user-supplied step configs are rejected.
   *
   * @throws RuntimeException if the configuration contains custom step configs
   */
  private List<StepConfig> configureStepConfigs(List<StepConfigConf> stepConfigConfs) {
    logger.info("Configuring step configs...");
    StepFactory stepFactory = new StepFactory();
    List<StepConfig> ret = new ArrayList<>();

    // Added for all clusters: enables the EMR debugging console.
    StepConfig enableDebugging = new StepConfig()
      .withName("Enable debugging")
      .withActionOnFailure("TERMINATE_JOB_FLOW")
      .withHadoopJarStep(stepFactory.newEnableDebuggingStep());
    ret.add(enableDebugging);

    if (!stepConfigConfs.isEmpty())
      throw new RuntimeException("The step configs feature is currently not supported.");

    return ret;
  }

  /** Wraps each product name (e.g. "hue") in a {@link SupportedProductConfig}. */
  private static Collection<SupportedProductConfig> configureProducts(List<String> installs) {
    logger.info("Configuring products to be installed...");
    ArrayList<SupportedProductConfig> supportedProductConfigs = new ArrayList<>();
    for (String product : installs) {
      supportedProductConfigs.add(new SupportedProductConfig().withName(product));
    }
    return supportedProductConfigs;
  }

  /** Configures the MASTER and CORE instance groups of the cluster. */
  private Collection<InstanceGroupConfig> configureClusterNodes(ClusterNodeConf masterNode,
                                                               ClusterNodeConf coreNode) {
    logger.info("Configuring cluster nodes...");
    // Renamed from "InstanceGroup" to follow lowerCamelCase local-variable naming.
    Collection<InstanceGroupConfig> instanceGroups = new ArrayList<>();

    InstanceGroupConfig master = configureClusterNode(masterNode, InstanceRoleType.MASTER);
    instanceGroups.add(master);

    InstanceGroupConfig core = configureClusterNode(coreNode, InstanceRoleType.CORE);
    instanceGroups.add(core);

    return instanceGroups;
  }

  /**
   * Configures a single instance group. The instance size is either given
   * directly ("size") or derived from the cheapest tier bid ("tier"); for
   * SPOT market nodes a bid price is always attached.
   *
   * @throws RuntimeException on an unknown market type or instance type category
   */
  private InstanceGroupConfig configureClusterNode(ClusterNodeConf nodeConfig, InstanceRoleType roleType) {
    String marketTypeString = nodeConfig.getMarketType().toLowerCase();
    MarketType marketType;
    switch (marketTypeString) {
      case "on_demand":
        marketType = MarketType.ON_DEMAND;
        break;
      case "spot":
        marketType = MarketType.SPOT;
        break;
      default:
        throw new RuntimeException("Unknown market type value " + marketTypeString);
    }

    InstanceTypeConf instanceTypeConf = nodeConfig.getInstanceType();
    String type = instanceTypeConf.getType().toLowerCase();
    String instanceTypeString;
    // Only ever non-null when a spot bid is actually needed (see below).
    EMRBid bid = null;
    switch (type) {
      case "size":
        instanceTypeString = instanceTypeConf.getSize(); //On_demand
        if (marketType.equals(MarketType.SPOT)) {
          bid = getEMRBid(nodeConfig);
        }
        break;
      case "tier":
        // The bid also determines which instance size in the tier to use.
        bid = getEMRBid(nodeConfig);
        instanceTypeString = bid.getSize();
        break;
      default:
        throw new RuntimeException("Unknown instance type category " + type);
    }

    InstanceGroupConfig instanceGroupConfig = new InstanceGroupConfig()
      .withInstanceCount(nodeConfig.getCount())
      .withInstanceRole(roleType)
      .withInstanceType(instanceTypeString)
      .withMarket(marketType);
    // bid is guaranteed non-null here for SPOT: both switch arms populate it
    // when marketType is SPOT.
    if (marketType.equals(MarketType.SPOT))
      instanceGroupConfig.withBidPrice(bid.getBidPrice());
    return instanceGroupConfig;
  }

  /**
   * Computes the best spot bid for the node, either for a fixed instance size
   * or for the cheapest size within a configured tier. Notifies operators and
   * rethrows on failure to fetch spot prices.
   */
  private EMRBid getEMRBid(ClusterNodeConf nodeConf) {
    InstanceTypeConf instanceTypeConf = nodeConf.getInstanceType();
    String type = instanceTypeConf.getType().toLowerCase();
    EMRBid bid;
    try {
      logger.info("Getting best bid price for nodes...");
      EMRBidder emrBidder = new EMRBidder(clusterConfJson.getSpotPriceURL(), clusterConfJson.getCurrency());
      switch (type) {
        case "tier":
          bid = emrBidder.bestBidByTier(clusterConfJson.getRegion(), nodeConf.getOs(),
            clusterConfJson.getTiers().get(instanceTypeConf.getTier()));
          break;
        case "size":
          bid = emrBidder.getSpotForSize(clusterConfJson.getRegion(), nodeConf.getOs(),
            instanceTypeConf.getSize());
          break;
        default:
          throw new RuntimeException("Unknown instance type category " + type);
      }
      logger.info(String.format("Bidding for size %s at price %s", bid.getSize(), bid.getBidPrice()));
      return bid;
    } catch (IOException e) {
      String subject = "Fail to create a bid price for EMR slaves";
      notifier.notify(subject, GeneralUtils.getStackTrace(e));
      throw new RuntimeException(subject, e);
    }
  }
}
/*
 * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.internal.serialization.impl;

import com.hazelcast.internal.serialization.InternalSerializationService;

import java.io.EOFException;
import java.nio.ByteOrder;

import static com.hazelcast.internal.memory.GlobalMemoryAccessor.MEM_COPY_THRESHOLD;
import static com.hazelcast.internal.memory.GlobalMemoryAccessorRegistry.MEM;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_BOOLEAN_BASE_OFFSET;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_BOOLEAN_INDEX_SCALE;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_BYTE_BASE_OFFSET;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_BYTE_INDEX_SCALE;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_CHAR_BASE_OFFSET;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_CHAR_INDEX_SCALE;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_DOUBLE_BASE_OFFSET;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_DOUBLE_INDEX_SCALE;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_FLOAT_BASE_OFFSET;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_FLOAT_INDEX_SCALE;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_INT_BASE_OFFSET;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_INT_INDEX_SCALE;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_LONG_BASE_OFFSET;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_LONG_INDEX_SCALE;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_SHORT_BASE_OFFSET;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_SHORT_INDEX_SCALE;
import static com.hazelcast.nio.Bits.CHAR_SIZE_IN_BYTES;
import static com.hazelcast.nio.Bits.DOUBLE_SIZE_IN_BYTES;
import static com.hazelcast.nio.Bits.FLOAT_SIZE_IN_BYTES;
import static com.hazelcast.nio.Bits.INT_SIZE_IN_BYTES;
import static com.hazelcast.nio.Bits.LONG_SIZE_IN_BYTES;
import static com.hazelcast.nio.Bits.NULL_ARRAY_LENGTH;
import static com.hazelcast.nio.Bits.SHORT_SIZE_IN_BYTES;

/**
 * A {@code ByteArrayObjectDataInput} variant that reads primitives directly
 * from the backing byte array through the {@code MEM} memory accessor instead
 * of byte-by-byte assembly. All reads use {@link ByteOrder#nativeOrder()};
 * the explicit-byte-order overloads reverse bytes when the requested order
 * differs from native.
 */
class UnsafeObjectDataInput extends ByteArrayObjectDataInput {

    UnsafeObjectDataInput(byte[] buffer, InternalSerializationService service) {
        super(buffer, service, ByteOrder.nativeOrder());
    }

    UnsafeObjectDataInput(byte[] buffer, int offset, InternalSerializationService service) {
        super(buffer, offset, service, ByteOrder.nativeOrder());
    }

    /** Reads the byte at the current position as an unsigned value (0..255), or -1 at end of data. */
    @Override
    public int read() {
        return (pos < size) ? MEM.getByte(data, ARRAY_BYTE_BASE_OFFSET + pos++) & 0xFF : -1;
    }

    // NOTE(review): unlike read(), this returns the raw *signed* byte (no
    // & 0xFF) and NULL_ARRAY_LENGTH (-1) when out of range, so a stored 0xFF
    // byte is indistinguishable from "out of range" here — confirm callers
    // expect this.
    @Override
    public int read(int position) {
        return (position < size) ? MEM.getByte(data, ARRAY_BYTE_BASE_OFFSET + position) : NULL_ARRAY_LENGTH;
    }

    /** Reads a native-order char at the given absolute position without advancing {@code pos}. */
    @Override
    public char readChar(int position) throws EOFException {
        checkAvailable(position, CHAR_SIZE_IN_BYTES);
        return MEM.getChar(data, ARRAY_BYTE_BASE_OFFSET + position);
    }

    /** Reads a double at the current position and advances {@code pos} by 8. */
    @Override
    public double readDouble() throws EOFException {
        final double d = readDouble(pos);
        pos += DOUBLE_SIZE_IN_BYTES;
        return d;
    }

    @Override
    public double readDouble(int position) throws EOFException {
        checkAvailable(position, DOUBLE_SIZE_IN_BYTES);
        return MEM.getDouble(data, ARRAY_BYTE_BASE_OFFSET + position);
    }

    /** Reads a float at the current position and advances {@code pos} by 4. */
    @Override
    public float readFloat() throws EOFException {
        final float f = readFloat(pos);
        pos += FLOAT_SIZE_IN_BYTES;
        return f;
    }

    @Override
    public float readFloat(int position) throws EOFException {
        checkAvailable(position, FLOAT_SIZE_IN_BYTES);
        return MEM.getFloat(data, ARRAY_BYTE_BASE_OFFSET + position);
    }

    @Override
    public int readInt(int position) throws EOFException {
        checkAvailable(position, INT_SIZE_IN_BYTES);
        return MEM.getInt(data, ARRAY_BYTE_BASE_OFFSET + position);
    }

    /** Reads an int at the given position; reverses bytes if the requested order is not native. */
    @Override
    public int readInt(int position, ByteOrder byteOrder) throws EOFException {
        int v = readInt(position);
        if (byteOrder != ByteOrder.nativeOrder()) {
            v = Integer.reverseBytes(v);
        }
        return v;
    }

    @Override
    public long readLong(int position) throws EOFException {
        checkAvailable(position, LONG_SIZE_IN_BYTES);
        return MEM.getLong(data, ARRAY_BYTE_BASE_OFFSET + position);
    }

    /** Reads a long at the given position; reverses bytes if the requested order is not native. */
    @Override
    public long readLong(int position, ByteOrder byteOrder) throws EOFException {
        long v = readLong(position);
        if (byteOrder != ByteOrder.nativeOrder()) {
            v = Long.reverseBytes(v);
        }
        return v;
    }

    @Override
    public short readShort(int position) throws EOFException {
        checkAvailable(position, SHORT_SIZE_IN_BYTES);
        return MEM.getShort(data, ARRAY_BYTE_BASE_OFFSET + position);
    }

    /** Reads a short at the given position; reverses bytes if the requested order is not native. */
    @Override
    public short readShort(int position, ByteOrder byteOrder) throws EOFException {
        short v = readShort(position);
        if (byteOrder != ByteOrder.nativeOrder()) {
            v = Short.reverseBytes(v);
        }
        return v;
    }

    // The array readers below share one wire format: a leading int length,
    // where NULL_ARRAY_LENGTH encodes a null array, 0 an empty array, and
    // otherwise `length` elements copied in bulk via memCopy.

    @Override
    public char[] readCharArray() throws EOFException {
        int len = readInt();
        if (len == NULL_ARRAY_LENGTH) {
            return null;
        }
        if (len > 0) {
            char[] values = new char[len];
            memCopy(values, ARRAY_CHAR_BASE_OFFSET, len, ARRAY_CHAR_INDEX_SCALE);
            return values;
        }
        return new char[0];
    }

    @Override
    public boolean[] readBooleanArray() throws EOFException {
        int len = readInt();
        if (len == NULL_ARRAY_LENGTH) {
            return null;
        }
        if (len > 0) {
            boolean[] values = new boolean[len];
            memCopy(values, ARRAY_BOOLEAN_BASE_OFFSET, len, ARRAY_BOOLEAN_INDEX_SCALE);
            return values;
        }
        return new boolean[0];
    }

    @Override
    public byte[] readByteArray() throws EOFException {
        int len = readInt();
        if (len == NULL_ARRAY_LENGTH) {
            return null;
        }
        if (len > 0) {
            byte[] values = new byte[len];
            memCopy(values, ARRAY_BYTE_BASE_OFFSET, len, ARRAY_BYTE_INDEX_SCALE);
            return values;
        }
        return new byte[0];
    }

    @Override
    public int[] readIntArray() throws EOFException {
        int len = readInt();
        if (len == NULL_ARRAY_LENGTH) {
            return null;
        }
        if (len > 0) {
            int[] values = new int[len];
            memCopy(values, ARRAY_INT_BASE_OFFSET, len, ARRAY_INT_INDEX_SCALE);
            return values;
        }
        return new int[0];
    }

    @Override
    public long[] readLongArray() throws EOFException {
        int len = readInt();
        if (len == NULL_ARRAY_LENGTH) {
            return null;
        }
        if (len > 0) {
            long[] values = new long[len];
            memCopy(values, ARRAY_LONG_BASE_OFFSET, len, ARRAY_LONG_INDEX_SCALE);
            return values;
        }
        return new long[0];
    }

    @Override
    public double[] readDoubleArray() throws EOFException {
        int len = readInt();
        if (len == NULL_ARRAY_LENGTH) {
            return null;
        }
        if (len > 0) {
            double[] values = new double[len];
            memCopy(values, ARRAY_DOUBLE_BASE_OFFSET, len, ARRAY_DOUBLE_INDEX_SCALE);
            return values;
        }
        return new double[0];
    }

    @Override
    public float[] readFloatArray() throws EOFException {
        int len = readInt();
        if (len == NULL_ARRAY_LENGTH) {
            return null;
        }
        if (len > 0) {
            float[] values = new float[len];
            memCopy(values, ARRAY_FLOAT_BASE_OFFSET, len, ARRAY_FLOAT_INDEX_SCALE);
            return values;
        }
        return new float[0];
    }

    @Override
    public short[] readShortArray() throws EOFException {
        int len = readInt();
        if (len == NULL_ARRAY_LENGTH) {
            return null;
        }
        if (len > 0) {
            short[] values = new short[len];
            memCopy(values, ARRAY_SHORT_BASE_OFFSET, len, ARRAY_SHORT_INDEX_SCALE);
            return values;
        }
        return new short[0];
    }

    /**
     * Bulk-copies {@code length} elements from the buffer at {@code pos} into
     * {@code dest}, advancing {@code pos}. Copies in chunks of at most
     * MEM_COPY_THRESHOLD bytes (keeps individual copyMemory calls bounded).
     *
     * @param dest       destination array
     * @param destOffset base offset of the destination array type
     * @param length     number of elements to copy (must be >= 0)
     * @param indexScale size in bytes of one destination element
     * @throws EOFException if fewer than {@code length * indexScale} bytes remain
     */
    private void memCopy(final Object dest, final long destOffset, final int length, final int indexScale)
            throws EOFException {
        if (length < 0) {
            throw new NegativeArraySizeException("Destination length is negative: " + length);
        }

        int remaining = length * indexScale;
        checkAvailable(pos, remaining);
        long offset = destOffset;

        while (remaining > 0) {
            int chunk = (remaining > MEM_COPY_THRESHOLD) ? MEM_COPY_THRESHOLD : remaining;
            MEM.copyMemory(data, ARRAY_BYTE_BASE_OFFSET + pos, dest, offset, chunk);
            remaining -= chunk;
            offset += chunk;
            pos += chunk;
        }
    }

    /** Always native order; this class never buffers in a foreign byte order. */
    @Override
    public ByteOrder getByteOrder() {
        return ByteOrder.nativeOrder();
    }

    @Override
    public String toString() {
        return "UnsafeObjectDataInput{"
                + "size=" + size
                + ", pos=" + pos
                + ", mark=" + mark
                + ", byteOrder=" + getByteOrder()
                + '}';
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.xcontent;

import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.lease.Releasable;

import java.io.IOException;
import java.util.List;
import java.util.Map;

/**
 * Interface for pull-parsing {@link XContent}; see {@link XContentType} for supported types.
 *
 * To obtain an instance of this class use the following pattern:
 *
 * <pre>
 * XContentType xContentType = XContentType.JSON;
 * XContentParser parser = xContentType.xContent().createParser("{\"key\" : \"value\"}");
 * </pre>
 */
public interface XContentParser extends Releasable {

    /**
     * The kinds of events a pull parse can produce. Structural tokens
     * (object/array boundaries, field names) report {@code isValue() == false};
     * scalar tokens report {@code true} — with the exception of
     * {@code VALUE_NULL}, which is a scalar but deliberately not a "value".
     */
    enum Token {
        START_OBJECT {
            @Override
            public boolean isValue() {
                return false;
            }
        },

        END_OBJECT {
            @Override
            public boolean isValue() {
                return false;
            }
        },

        START_ARRAY {
            @Override
            public boolean isValue() {
                return false;
            }
        },

        END_ARRAY {
            @Override
            public boolean isValue() {
                return false;
            }
        },

        FIELD_NAME {
            @Override
            public boolean isValue() {
                return false;
            }
        },

        VALUE_STRING {
            @Override
            public boolean isValue() {
                return true;
            }
        },

        VALUE_NUMBER {
            @Override
            public boolean isValue() {
                return true;
            }
        },

        VALUE_BOOLEAN {
            @Override
            public boolean isValue() {
                return true;
            }
        },

        // usually a binary value
        VALUE_EMBEDDED_OBJECT {
            @Override
            public boolean isValue() {
                return true;
            }
        },

        VALUE_NULL {
            @Override
            public boolean isValue() {
                return false;
            }
        };

        public abstract boolean isValue();
    }

    /** Numeric representations a {@code VALUE_NUMBER} token may carry. */
    enum NumberType {
        INT, LONG, FLOAT, DOUBLE
    }

    /** The content type this parser was created for (JSON, YAML, ...). */
    XContentType contentType();

    /** Advances to and returns the next token, or null at end of input. */
    Token nextToken() throws IOException;

    /** Skips the children of the current START_OBJECT/START_ARRAY token. */
    void skipChildren() throws IOException;

    /** Returns the token the parser currently points at without advancing. */
    Token currentToken();

    /** Returns the current field name (valid at/after a FIELD_NAME token). */
    String currentName() throws IOException;

    /** Parses the current object into a map. */
    Map<String, Object> map() throws IOException;

    /** Parses the current object into a map preserving field order. */
    Map<String, Object> mapOrdered() throws IOException;

    /** Parses the current array into a list. */
    List<Object> list() throws IOException;

    /** Parses the current array into a list, using ordered maps for nested objects. */
    List<Object> listOrderedMap() throws IOException;

    /** Returns the textual content of the current token. */
    String text() throws IOException;

    /** Like {@link #text()} but returns null for a VALUE_NULL token. */
    String textOrNull() throws IOException;

    /**
     * Returns a BytesRef holding UTF-8 bytes or null if a null value is {@link Token#VALUE_NULL}.
     * This method should be used to read text only; binary content should be read through {@link #binaryValue()}
     */
    BytesRef utf8BytesOrNull() throws IOException;

    /**
     * Returns a BytesRef holding UTF-8 bytes.
     * This method should be used to read text only; binary content should be read through {@link #binaryValue()}
     */
    BytesRef utf8Bytes() throws IOException;

    /** Returns the current value as an appropriately-typed object (String, Number, Boolean, ...). */
    Object objectText() throws IOException;

    /** Like {@link #objectText()} but may return raw bytes for textual values. */
    Object objectBytes() throws IOException;

    /**
     * Method that can be used to determine whether calling of textCharacters() would be the most efficient way to
     * access textual content for the event parser currently points to.
     *
     * Default implementation simply returns false since only actual
     * implementation class has knowledge of its internal buffering
     * state.
     *
     * This method shouldn't be used to check if the token contains text or not.
     */
    boolean hasTextCharacters();

    /** Returns the current token's text as a char buffer; see {@link #textOffset()} and {@link #textLength()}. */
    char[] textCharacters() throws IOException;

    /** Length of the text within the {@link #textCharacters()} buffer. */
    int textLength() throws IOException;

    /** Offset of the text within the {@link #textCharacters()} buffer. */
    int textOffset() throws IOException;

    /** Returns the current numeric value boxed as a {@link Number}. */
    Number numberValue() throws IOException;

    /** Returns the representation of the current numeric token. */
    NumberType numberType() throws IOException;

    /**
     * Is the number type estimated or not (i.e. an int might actually be a long, its just low enough
     * to be an int).
     */
    boolean estimatedNumberType();

    short shortValue(boolean coerce) throws IOException;

    int intValue(boolean coerce) throws IOException;

    long longValue(boolean coerce) throws IOException;

    float floatValue(boolean coerce) throws IOException;

    double doubleValue(boolean coerce) throws IOException;

    short shortValue() throws IOException;

    int intValue() throws IOException;

    long longValue() throws IOException;

    float floatValue() throws IOException;

    double doubleValue() throws IOException;

    /**
     * returns true if the current value is boolean in nature.
     * values that are considered booleans:
     * - boolean value (true/false)
     * - numeric integers (=0 is considered as false, !=0 is true)
     * - one of the following strings: "true","false","on","off","yes","no","1","0"
     */
    boolean isBooleanValue() throws IOException;

    boolean booleanValue() throws IOException;

    /**
     * Reads a plain binary value that was written via one of the following methods:
     *
     * <ul>
     *     <li>{@link XContentBuilder#field(String, org.apache.lucene.util.BytesRef)}</li>
     *     <li>{@link XContentBuilder#field(String, org.elasticsearch.common.bytes.BytesReference)}</li>
     *     <li>{@link XContentBuilder#field(String, byte[], int, int)}}</li>
     *     <li>{@link XContentBuilder#field(String, byte[])}}</li>
     * </ul>
     *
     * as well as via their <code>XContentBuilderString</code> variants of the separated value methods.
     * Note: Do not use this method to read values written with:
     * <ul>
     *     <li>{@link XContentBuilder#utf8Field(XContentBuilderString, org.apache.lucene.util.BytesRef)}</li>
     *     <li>{@link XContentBuilder#utf8Field(String, org.apache.lucene.util.BytesRef)}</li>
     * </ul>
     *
     * these methods write UTF-8 encoded strings and must be read through:
     * <ul>
     *     <li>{@link XContentParser#utf8Bytes()}</li>
     *     <li>{@link XContentParser#utf8BytesOrNull()}}</li>
     *     <li>{@link XContentParser#text()} ()}</li>
     *     <li>{@link XContentParser#textOrNull()} ()}</li>
     *     <li>{@link XContentParser#textCharacters()} ()}}</li>
     * </ul>
     *
     */
    byte[] binaryValue() throws IOException;

    /**
     * Used for error reporting to highlight where syntax errors occur in
     * content being parsed.
     *
     * @return last token's location or null if cannot be determined
     */
    XContentLocation getTokenLocation();

    /** Whether this parser has been closed. */
    boolean isClosed();

    /**
     * Returns this parsers {@link ParseFieldMatcher}
     */
    ParseFieldMatcher getParseFieldMatcher();

    /**
     * Sets this parsers {@link ParseFieldMatcher}
     */
    void setParseFieldMatcher(ParseFieldMatcher matcher) ;
}
/*
 * Copyright 2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.gradle.util.internal;

import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.lang.StringUtils;

/**
 * Selects a single item from a collection based on a camel case pattern.
 */
public class NameMatcher {
    // Items that matched the last pattern (ambiguous when size > 1).
    private final SortedSet<String> matches = new TreeSet<>();
    // Near-misses (small Levenshtein distance) collected for error reporting.
    private final Set<String> candidates = new TreeSet<>();
    // The pattern passed to the most recent find() call; used by formatErrorMessage().
    private String pattern;

    /**
     * Locates the best match for a camel case pattern in a key set of a map and returns the corresponding value.
     *
     * @return The matching item if exactly 1 match found, null if no matches or multiple matches.
     * @see #find(String, Collection)
     */
    public <T> T find(String pattern, Map<String, ? extends T> items) {
        String name = find(pattern, items.keySet());
        if (name != null) {
            return items.get(name);
        }
        return null;
    }

    /**
     * Locates the best match for a camel case pattern in a collection.
     * <p>
     * The pattern is expanded to match on camel case and on kebab case strings. For example, the pattern {@code gBD}
     * matches to {@code gradleBinaryDistribution} and {@code gradle-binary-distribution}.
     * <p>
     * The method will return {@code null} if the pattern is an empty string.
     * <p>
     * If the target collection contains the pattern string then the method omits the pattern matching and returns the pattern.
     *
     * @return The match if exactly 1 match found, null if no matches or multiple matches.
     */
    public String find(String pattern, Collection<String> items) {
        this.pattern = pattern;
        matches.clear();
        candidates.clear();

        // Exact match short-circuits all pattern expansion.
        if (items.contains(pattern)) {
            matches.add(pattern);
            return pattern;
        }

        if (pattern.length() == 0) {
            return null;
        }

        // Derived matchers, from strictest to loosest:
        // case-sensitive camel case, case-insensitive camel-case prefix,
        // kebab case, and kebab-case prefix.
        Pattern camelCasePattern = getPatternForName(pattern);
        Pattern normalisedCamelCasePattern = Pattern.compile(camelCasePattern.pattern(), Pattern.CASE_INSENSITIVE);
        String normalisedPattern = pattern.toUpperCase();

        Pattern kebabCasePattern = getKebabCasePatternForName(pattern);
        Pattern kebabCasePrefixPattern = Pattern.compile(kebabCasePattern.pattern() + "[\\p{javaLowerCase}\\p{Digit}-]*");

        Set<String> caseInsensitiveMatches = new TreeSet<>();
        Set<String> caseSensitiveCamelCaseMatches = new TreeSet<>();
        Set<String> caseInsensitiveCamelCaseMatches = new TreeSet<>();
        Set<String> kebabCaseMatches = new TreeSet<>();
        Set<String> kebabCasePrefixMatches = new TreeSet<>();

        for (String candidate : items) {
            boolean found = false;

            if (candidate.equalsIgnoreCase(pattern)) {
                caseInsensitiveMatches.add(candidate);
                found = true;
            }

            if (camelCasePattern.matcher(candidate).matches()) {
                caseSensitiveCamelCaseMatches.add(candidate);
                found = true;
            }

            // lookingAt(): a case-insensitive camel-case *prefix* match.
            if (normalisedCamelCasePattern.matcher(candidate).lookingAt()) {
                caseInsensitiveCamelCaseMatches.add(candidate);
                found = true;
            }

            if (kebabCasePattern.matcher(candidate).matches()) {
                kebabCaseMatches.add(candidate);
                found = true;
            }

            if (kebabCasePrefixPattern.matcher(candidate).matches()) {
                kebabCasePrefixMatches.add(candidate);
                found = true;
            }

            // Non-matching candidates that are "close" (edit distance bounded
            // by min(3, half the pattern length)) are remembered as suggestions.
            if (!found && StringUtils.getLevenshteinDistance(normalisedPattern, candidate.toUpperCase()) <= Math.min(3, pattern.length() / 2)) {
                candidates.add(candidate);
            }
        }

        // Match-tier selection. NOTE: the case-insensitive camel-case tier is
        // only consulted when *no* kebab-case (or kebab prefix) match exists;
        // kebab-case tiers are then merged in on top. This ordering is
        // deliberate and order-sensitive.
        if (!caseInsensitiveMatches.isEmpty()) {
            matches.addAll(caseInsensitiveMatches);
        } else if (!caseSensitiveCamelCaseMatches.isEmpty()) {
            matches.addAll(caseSensitiveCamelCaseMatches);
        } else if (kebabCaseMatches.isEmpty() && kebabCasePrefixMatches.isEmpty()) {
            matches.addAll(caseInsensitiveCamelCaseMatches);
        }

        if (!kebabCaseMatches.isEmpty()) {
            matches.addAll(kebabCaseMatches);
        } else if (!kebabCasePrefixMatches.isEmpty()) {
            matches.addAll(kebabCasePrefixMatches);
        }

        // Only an unambiguous (single) match is returned.
        if (matches.size() == 1) {
            return matches.first();
        }

        return null;
    }

    /**
     * Expands a name into a case-sensitive camel-case regex: each word
     * boundary segment is quoted and allowed to be followed by additional
     * lower-case letters/digits (so "gBD" matches "gradleBinaryDistribution").
     */
    private static Pattern getPatternForName(String name) {
        Pattern boundaryPattern = Pattern.compile("((^|\\p{Punct})\\p{javaLowerCase}+)|(\\p{javaUpperCase}\\p{javaLowerCase}*)");
        Matcher matcher = boundaryPattern.matcher(name);
        int pos = 0;
        StringBuilder builder = new StringBuilder();
        while (matcher.find()) {
            String prefix = name.substring(pos, matcher.start());
            if (prefix.length() > 0) {
                builder.append(Pattern.quote(prefix));
            }
            builder.append(Pattern.quote(matcher.group()));
            builder.append("[\\p{javaLowerCase}\\p{Digit}]*");
            pos = matcher.end();
        }
        // Quote any trailing remainder of the name verbatim.
        builder.append(Pattern.quote(name.substring(pos)));
        return Pattern.compile(builder.toString());
    }

    /**
     * Like {@link #getPatternForName(String)} but lower-cases each segment and
     * joins segments with '-' (so "gBD" matches "gradle-binary-distribution").
     */
    private static Pattern getKebabCasePatternForName(String name) {
        Pattern boundaryPattern = Pattern.compile("((^|\\p{Punct})\\p{javaLowerCase}+)|(\\p{javaUpperCase}\\p{javaLowerCase}*)");
        Matcher matcher = boundaryPattern.matcher(name);
        int pos = 0;
        StringBuilder builder = new StringBuilder();
        while (matcher.find()) {
            String prefix = name.substring(pos, matcher.start());
            if (prefix.length() > 0) {
                builder.append(Pattern.quote(prefix));
            }
            // Separate subsequent segments with a literal dash.
            if (pos > 0) {
                builder.append('-');
            }
            builder.append(Pattern.quote(matcher.group().toLowerCase()));
            builder.append("[\\p{javaLowerCase}\\p{Digit}]*");
            pos = matcher.end();
        }
        builder.append(Pattern.quote(name.substring(pos)));
        return Pattern.compile(builder.toString());
    }

    /**
     * Returns all matches, when there were more than 1.
     *
     * @return The matches. Returns an empty set when there are no matches.
     */
    public Set<String> getMatches() {
        return matches;
    }

    /**
     * Returns the potential matches, if any.
     *
     * @return The matches. Returns an empty set when there are no potential matches.
     */
    public Set<String> getCandidates() {
        return candidates;
    }

    /**
     * Returns a formatted error message describing why the pattern matching failed.
     *
     * @return The error message.
     */
    public String formatErrorMessage(String singularItemDescription, Object container) {
        String capItem = StringUtils.capitalize(singularItemDescription);
        if (!matches.isEmpty()) {
            return String.format("%s '%s' is ambiguous in %s. Candidates are: %s.", capItem, pattern, container, GUtil.toString(matches));
        }
        if (!candidates.isEmpty()) {
            return String.format("%s '%s' not found in %s. Some candidates are: %s.", capItem, pattern, container, GUtil.toString(candidates));
        }
        return String.format("%s '%s' not found in %s.", capItem, pattern, container);
    }
}
/*
 * Copyright 2012-2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.autoconfigure.orm.jpa;

import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;

import org.hibernate.engine.transaction.jta.platform.internal.NoJtaPlatform;
import org.junit.Test;

import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.autoconfigure.TestAutoConfigurationPackage;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.boot.autoconfigure.jdbc.DataSourceTransactionManagerAutoConfiguration;
import org.springframework.boot.autoconfigure.orm.jpa.test.City;
import org.springframework.boot.autoconfigure.transaction.TransactionAutoConfiguration;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.boot.test.context.assertj.AssertableApplicationContext;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.boot.test.context.runner.ContextConsumer;
import org.springframework.boot.test.context.runner.WebApplicationContextRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.JpaVendorAdapter;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.persistenceunit.DefaultPersistenceUnitManager;
import org.springframework.orm.jpa.persistenceunit.PersistenceUnitManager;
import org.springframework.orm.jpa.support.OpenEntityManagerInViewFilter;
import org.springframework.orm.jpa.support.OpenEntityManagerInViewInterceptor;
import org.springframework.transaction.PlatformTransactionManager;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Base for JPA tests and tests for {@link JpaBaseConfiguration}.
 *
 * @author Phillip Webb
 * @author Dave Syer
 * @author Stephane Nicoll
 */
public abstract class AbstractJpaAutoConfigurationTests {

	// The concrete JPA auto-configuration class under test, supplied by subclasses.
	private final Class<?> autoConfiguredClass;

	// Shared runner pre-wired with a unique-name DataSource, transaction support
	// and the auto-configuration under test.
	private final ApplicationContextRunner contextRunner;

	protected AbstractJpaAutoConfigurationTests(Class<?> autoConfiguredClass) {
		this.autoConfiguredClass = autoConfiguredClass;
		this.contextRunner = new ApplicationContextRunner()
				.withPropertyValues("spring.datasource.generate-unique-name=true")
				.withUserConfiguration(TestConfiguration.class).withConfiguration(
						AutoConfigurations.of(DataSourceAutoConfiguration.class,
								TransactionAutoConfiguration.class, autoConfiguredClass));
	}

	protected ApplicationContextRunner contextRunner() {
		return this.contextRunner;
	}

	@Test
	public void notConfiguredIfDataSourceIsNotAvailable() {
		// No DataSource auto-configuration at all -> JPA must back off.
		new ApplicationContextRunner()
				.withConfiguration(AutoConfigurations.of(this.autoConfiguredClass))
				.run(assertJpaIsNotAutoConfigured());
	}

	@Test
	public void notConfiguredIfNoSingleDataSourceCandidateIsAvailable() {
		// Two non-primary DataSource beans -> no single candidate -> JPA must back off.
		new ApplicationContextRunner()
				.withUserConfiguration(TestTwoDataSourcesConfiguration.class)
				.withConfiguration(AutoConfigurations.of(this.autoConfiguredClass))
				.run(assertJpaIsNotAutoConfigured());
	}

	/**
	 * Asserts that the context started but that neither a transaction manager nor an
	 * {@link EntityManagerFactory} was auto-configured.
	 */
	protected ContextConsumer<AssertableApplicationContext> assertJpaIsNotAutoConfigured() {
		return (context) -> {
			assertThat(context).hasNotFailed();
			assertThat(context).hasSingleBean(JpaProperties.class);
			assertThat(context).doesNotHaveBean(PlatformTransactionManager.class);
			assertThat(context).doesNotHaveBean(EntityManagerFactory.class);
		};
	}

	@Test
	public void configuredWithAutoConfiguredDataSource() {
		this.contextRunner.run((context) -> {
			assertThat(context).hasSingleBean(DataSource.class);
			assertThat(context).hasSingleBean(JpaTransactionManager.class);
			assertThat(context).hasSingleBean(EntityManagerFactory.class);
		});
	}

	@Test
	public void configuredWithSingleCandidateDataSource() {
		// Two DataSources but one marked @Primary -> JPA configures against the primary.
		this.contextRunner
				.withUserConfiguration(TestTwoDataSourcesAndPrimaryConfiguration.class)
				.run((context) -> {
					assertThat(context).getBeans(DataSource.class).hasSize(2);
					assertThat(context).hasSingleBean(JpaTransactionManager.class);
					assertThat(context).hasSingleBean(EntityManagerFactory.class);
				});
	}

	@Test
	public void jtaTransactionManagerTakesPrecedence() {
		this.contextRunner
				.withConfiguration(AutoConfigurations
						.of(DataSourceTransactionManagerAutoConfiguration.class))
				.run((context) -> {
					assertThat(context).hasSingleBean(DataSource.class);
					assertThat(context).hasSingleBean(JpaTransactionManager.class);
					assertThat(context).getBean("transactionManager")
							.isInstanceOf(JpaTransactionManager.class);
				});
	}

	@Test
	public void openEntityManagerInViewInterceptorIsCreated() {
		// Web context -> OpenEntityManagerInView interceptor registered by default.
		new WebApplicationContextRunner()
				.withPropertyValues("spring.datasource.generate-unique-name=true")
				.withUserConfiguration(TestConfiguration.class)
				.withConfiguration(AutoConfigurations.of(
						DataSourceAutoConfiguration.class,
						TransactionAutoConfiguration.class, this.autoConfiguredClass))
				.run((context) -> assertThat(context)
						.hasSingleBean(OpenEntityManagerInViewInterceptor.class));
	}

	@Test
	public void openEntityManagerInViewInterceptorIsNotRegisteredWhenFilterPresent() {
		// A user-defined OpenEntityManagerInViewFilter supersedes the interceptor.
		new WebApplicationContextRunner()
				.withPropertyValues("spring.datasource.generate-unique-name=true")
				.withUserConfiguration(TestFilterConfiguration.class)
				.withConfiguration(AutoConfigurations.of(
						DataSourceAutoConfiguration.class,
						TransactionAutoConfiguration.class, this.autoConfiguredClass))
				.run((context) -> assertThat(context)
						.doesNotHaveBean(OpenEntityManagerInViewInterceptor.class));
	}

	@Test
	public void openEntityManagerInViewInterceptorIsNotRegisteredWhenExplicitlyOff() {
		// spring.jpa.open-in-view=false disables the interceptor.
		new WebApplicationContextRunner()
				.withPropertyValues("spring.datasource.generate-unique-name=true",
						"spring.jpa.open-in-view=false")
				.withUserConfiguration(TestConfiguration.class)
				.withConfiguration(AutoConfigurations.of(
						DataSourceAutoConfiguration.class,
						TransactionAutoConfiguration.class, this.autoConfiguredClass))
				.run((context) -> assertThat(context)
						.doesNotHaveBean(OpenEntityManagerInViewInterceptor.class));
	}

	@Test
	public void customJpaProperties() {
		this.contextRunner
				.withPropertyValues("spring.jpa.properties.a:b",
						"spring.jpa.properties.a.b:c", "spring.jpa.properties.c:d")
				.run((context) -> {
					LocalContainerEntityManagerFactoryBean bean = context
							.getBean(LocalContainerEntityManagerFactoryBean.class);
					Map<String, Object> map = bean.getJpaPropertyMap();
					assertThat(map.get("a")).isEqualTo("b");
					assertThat(map.get("c")).isEqualTo("d");
					assertThat(map.get("a.b")).isEqualTo("c");
				});
	}

	@Test
	public void usesManuallyDefinedLocalContainerEntityManagerFactoryBeanIfAvailable() {
		this.contextRunner
				.withUserConfiguration(
						TestConfigurationWithLocalContainerEntityManagerFactoryBean.class)
				.run((context) -> {
					LocalContainerEntityManagerFactoryBean factoryBean = context
							.getBean(LocalContainerEntityManagerFactoryBean.class);
					Map<String, Object> map = factoryBean.getJpaPropertyMap();
					assertThat(map.get("configured")).isEqualTo("manually");
				});
	}

	@Test
	public void usesManuallyDefinedEntityManagerFactoryIfAvailable() {
		// Fixed: this test must exercise a user-defined EntityManagerFactory bean
		// (TestConfigurationWithEntityManagerFactory), not the factory-bean variant
		// already covered by the previous test.
		this.contextRunner
				.withUserConfiguration(TestConfigurationWithEntityManagerFactory.class)
				.run((context) -> {
					EntityManagerFactory factoryBean = context
							.getBean(EntityManagerFactory.class);
					Map<String, Object> map = factoryBean.getProperties();
					assertThat(map.get("configured")).isEqualTo("manually");
				});
	}

	@Test
	public void usesManuallyDefinedTransactionManagerBeanIfAvailable() {
		this.contextRunner
				.withUserConfiguration(TestConfigurationWithTransactionManager.class)
				.run((context) -> {
					PlatformTransactionManager txManager = context
							.getBean(PlatformTransactionManager.class);
					assertThat(txManager).isInstanceOf(CustomJpaTransactionManager.class);
				});
	}

	@Test
	public void customPersistenceUnitManager() {
		this.contextRunner
				.withUserConfiguration(
						TestConfigurationWithCustomPersistenceUnitManager.class)
				.run((context) -> {
					LocalContainerEntityManagerFactoryBean entityManagerFactoryBean = context
							.getBean(LocalContainerEntityManagerFactoryBean.class);
					// The field is not exposed via a getter; reflect to verify the
					// user-provided PersistenceUnitManager was actually applied.
					Field field = LocalContainerEntityManagerFactoryBean.class
							.getDeclaredField("persistenceUnitManager");
					field.setAccessible(true);
					assertThat(field.get(entityManagerFactoryBean))
							.isEqualTo(context.getBean(PersistenceUnitManager.class));
				});
	}

	@Configuration
	protected static class TestTwoDataSourcesConfiguration {

		@Bean
		public DataSource firstDataSource() {
			return createRandomDataSource();
		}

		@Bean
		public DataSource secondDataSource() {
			return createRandomDataSource();
		}

		private DataSource createRandomDataSource() {
			String url = "jdbc:h2:mem:init-" + UUID.randomUUID().toString();
			return DataSourceBuilder.create().url(url).build();
		}

	}

	@Configuration
	static class TestTwoDataSourcesAndPrimaryConfiguration {

		@Bean
		@Primary
		public DataSource firstDataSource() {
			return createRandomDataSource();
		}

		@Bean
		public DataSource secondDataSource() {
			return createRandomDataSource();
		}

		private DataSource createRandomDataSource() {
			String url = "jdbc:h2:mem:init-" + UUID.randomUUID().toString();
			return DataSourceBuilder.create().url(url).build();
		}

	}

	@Configuration
	@TestAutoConfigurationPackage(City.class)
	protected static class TestConfiguration {

	}

	@Configuration
	@TestAutoConfigurationPackage(City.class)
	protected static class TestFilterConfiguration {

		@Bean
		public OpenEntityManagerInViewFilter openEntityManagerInViewFilter() {
			return new OpenEntityManagerInViewFilter();
		}

	}

	@Configuration
	protected static class TestConfigurationWithLocalContainerEntityManagerFactoryBean
			extends TestConfiguration {

		@Bean
		public LocalContainerEntityManagerFactoryBean entityManagerFactory(
				DataSource dataSource, JpaVendorAdapter adapter) {
			LocalContainerEntityManagerFactoryBean factoryBean = new LocalContainerEntityManagerFactoryBean();
			factoryBean.setJpaVendorAdapter(adapter);
			factoryBean.setDataSource(dataSource);
			factoryBean.setPersistenceUnitName("manually-configured");
			Map<String, Object> properties = new HashMap<>();
			properties.put("configured", "manually");
			properties.put("hibernate.transaction.jta.platform",
					NoJtaPlatform.INSTANCE);
			factoryBean.setJpaPropertyMap(properties);
			return factoryBean;
		}

	}

	@Configuration
	protected static class TestConfigurationWithEntityManagerFactory
			extends TestConfiguration {

		@Bean
		public EntityManagerFactory entityManagerFactory(DataSource dataSource,
				JpaVendorAdapter adapter) {
			LocalContainerEntityManagerFactoryBean factoryBean = new LocalContainerEntityManagerFactoryBean();
			factoryBean.setJpaVendorAdapter(adapter);
			factoryBean.setDataSource(dataSource);
			factoryBean.setPersistenceUnitName("manually-configured");
			Map<String, Object> properties = new HashMap<>();
			properties.put("configured", "manually");
			properties.put("hibernate.transaction.jta.platform",
					NoJtaPlatform.INSTANCE);
			factoryBean.setJpaPropertyMap(properties);
			factoryBean.afterPropertiesSet();
			return factoryBean.getObject();
		}

		@Bean
		public PlatformTransactionManager transactionManager(EntityManagerFactory emf) {
			JpaTransactionManager transactionManager = new JpaTransactionManager();
			transactionManager.setEntityManagerFactory(emf);
			return transactionManager;
		}

	}

	@Configuration
	@TestAutoConfigurationPackage(City.class)
	protected static class TestConfigurationWithTransactionManager {

		@Bean
		public PlatformTransactionManager transactionManager() {
			return new CustomJpaTransactionManager();
		}

	}

	@Configuration
	@TestAutoConfigurationPackage(AbstractJpaAutoConfigurationTests.class)
	public static class TestConfigurationWithCustomPersistenceUnitManager {

		private final DataSource dataSource;

		public TestConfigurationWithCustomPersistenceUnitManager(DataSource dataSource) {
			this.dataSource = dataSource;
		}

		@Bean
		public PersistenceUnitManager persistenceUnitManager() {
			DefaultPersistenceUnitManager persistenceUnitManager = new DefaultPersistenceUnitManager();
			persistenceUnitManager.setDefaultDataSource(this.dataSource);
			persistenceUnitManager.setPackagesToScan(City.class.getPackage().getName());
			return persistenceUnitManager;
		}

	}

	@SuppressWarnings("serial")
	static class CustomJpaTransactionManager extends JpaTransactionManager {

	}

}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.indices.settings;

import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.spi.LoggingEvent;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.index.merge.policy.TieredMergePolicyProvider;
import org.elasticsearch.index.merge.scheduler.ConcurrentMergeSchedulerProvider;
import org.elasticsearch.index.merge.scheduler.MergeSchedulerModule;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.store.support.AbstractIndexStore;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;

/**
 * Integration tests for the update-index-settings API: verifies which settings
 * can be changed while an index is open vs. closed, and that dynamic changes to
 * merge-throttling / merge-scheduler settings actually take effect at runtime.
 */
public class UpdateSettingsTests extends ElasticsearchIntegrationTest {

    /**
     * Dynamic settings (index.refresh_interval) may be updated on an open index,
     * non-dynamic ones (index.cache.filter.type) only while the index is closed.
     * Mixing the two in one request must reject the whole request.
     */
    @Test
    public void testOpenCloseUpdateSettings() throws Exception {
        createIndex("test");
        try {
            client().admin().indices().prepareUpdateSettings("test")
                    .setSettings(ImmutableSettings.settingsBuilder()
                            .put("index.refresh_interval", -1) // this one can change
                            .put("index.cache.filter.type", "none") // this one can't
                    )
                    .execute().actionGet();
            fail();
        } catch (ElasticsearchIllegalArgumentException e) {
            // all is well — the mixed request was rejected atomically
        }

        // Neither setting must have been applied (rejection is all-or-nothing):
        IndexMetaData indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test");
        assertThat(indexMetaData.settings().get("index.refresh_interval"), nullValue());
        assertThat(indexMetaData.settings().get("index.cache.filter.type"), nullValue());

        // Now verify via dedicated get settings api:
        GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings("test").get();
        assertThat(getSettingsResponse.getSetting("test", "index.refresh_interval"), nullValue());
        assertThat(getSettingsResponse.getSetting("test", "index.cache.filter.type"), nullValue());

        // Updating only the dynamic setting succeeds on the open index:
        client().admin().indices().prepareUpdateSettings("test")
                .setSettings(ImmutableSettings.settingsBuilder()
                        .put("index.refresh_interval", -1) // this one can change
                )
                .execute().actionGet();

        indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test");
        assertThat(indexMetaData.settings().get("index.refresh_interval"), equalTo("-1"));

        // Now verify via dedicated get settings api:
        getSettingsResponse = client().admin().indices().prepareGetSettings("test").get();
        assertThat(getSettingsResponse.getSetting("test", "index.refresh_interval"), equalTo("-1"));

        // now close the index, change the non dynamic setting, and see that it applies

        // Wait for the index to turn green before attempting to close it
        ClusterHealthResponse health = client().admin().cluster().prepareHealth().setTimeout("30s").setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
        assertThat(health.isTimedOut(), equalTo(false));

        client().admin().indices().prepareClose("test").execute().actionGet();

        // On a closed index both settings may be changed in one request:
        client().admin().indices().prepareUpdateSettings("test")
                .setSettings(ImmutableSettings.settingsBuilder()
                        .put("index.refresh_interval", "1s") // this one can change
                        .put("index.cache.filter.type", "none") // this one can't
                )
                .execute().actionGet();

        indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test");
        assertThat(indexMetaData.settings().get("index.refresh_interval"), equalTo("1s"));
        assertThat(indexMetaData.settings().get("index.cache.filter.type"), equalTo("none"));

        // Now verify via dedicated get settings api:
        getSettingsResponse = client().admin().indices().prepareGetSettings("test").get();
        assertThat(getSettingsResponse.getSetting("test", "index.refresh_interval"), equalTo("1s"));
        assertThat(getSettingsResponse.getSetting("test", "index.cache.filter.type"), equalTo("none"));
    }

    /**
     * Setting index.gc_deletes to 0 makes delete tombstones expire immediately,
     * so a subsequent versioned index with the stale version must conflict.
     */
    @Test
    public void testEngineGCDeletesSetting() throws InterruptedException {
        createIndex("test");
        client().prepareIndex("test", "type", "1").setSource("f", 1).get(); // set version to 1
        client().prepareDelete("test", "type", "1").get(); // sets version to 2
        client().prepareIndex("test", "type", "1").setSource("f", 2).setVersion(2).get(); // delete is still in cache this should work & set version to 3
        client().admin().indices().prepareUpdateSettings("test")
                .setSettings(ImmutableSettings.settingsBuilder()
                        .put("index.gc_deletes", 0)
                ).get();

        client().prepareDelete("test", "type", "1").get(); // sets version to 4
        Thread.sleep(300); // wait for cache time to change TODO: this needs to be solved better. To be discussed.
        // delete is should not be in cache
        assertThrows(client().prepareIndex("test", "type", "1").setSource("f", 3).setVersion(4), VersionConflictEngineException.class);
    }

    // #6626: make sure we can update throttle settings and the changes take effect
    @Test
    @Slow
    public void testUpdateThrottleSettings() {

        // No throttling at first, only 1 non-replicated shard, force lots of merging:
        assertAcked(prepareCreate("test")
                .setSettings(ImmutableSettings.builder()
                        .put(AbstractIndexStore.INDEX_STORE_THROTTLE_TYPE, "none")
                        .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1")
                        .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0")
                        .put(TieredMergePolicyProvider.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE, "2")
                        .put(TieredMergePolicyProvider.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, "2")
                        .put(ConcurrentMergeSchedulerProvider.MAX_THREAD_COUNT, "1")
                        .put(ConcurrentMergeSchedulerProvider.MAX_MERGE_COUNT, "2")
                        .put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL, 0) // get stats all the time - no caching
                ));
        ensureGreen();
        long termUpto = 0;
        for (int i = 0; i < 100; i++) {
            // Provoke slowish merging by making many unique terms:
            StringBuilder sb = new StringBuilder();
            for (int j = 0; j < 100; j++) {
                sb.append(' ');
                sb.append(termUpto++);
            }
            client().prepareIndex("test", "type", "" + termUpto).setSource("field" + (i % 10), sb.toString()).get();
            if (i % 2 == 0) {
                refresh();
            }
        }

        // No merge IO throttling should have happened:
        NodesStatsResponse nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true).get();
        for (NodeStats stats : nodesStats.getNodes()) {
            assertThat(stats.getIndices().getStore().getThrottleTime().getMillis(), equalTo(0l));
        }

        logger.info("test: set low merge throttling");

        // Now updates settings to turn on merge throttling lowish rate
        client()
                .admin()
                .indices()
                .prepareUpdateSettings("test")
                .setSettings(ImmutableSettings.builder()
                        .put(AbstractIndexStore.INDEX_STORE_THROTTLE_TYPE, "merge")
                        .put(AbstractIndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC, "1mb"))
                .get();

        // Make sure setting says it is in fact changed:
        GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings("test").get();
        assertThat(getSettingsResponse.getSetting("test", AbstractIndexStore.INDEX_STORE_THROTTLE_TYPE), equalTo("merge"));

        // Also make sure we see throttling kicking in: keep indexing until
        // the node store stats report a non-zero throttle time.
        boolean done = false;
        while (done == false) {
            // Provoke slowish merging by making many unique terms:
            for (int i = 0; i < 5; i++) {
                StringBuilder sb = new StringBuilder();
                for (int j = 0; j < 100; j++) {
                    sb.append(' ');
                    sb.append(termUpto++);
                    sb.append(" some random text that keeps repeating over and over again hambone");
                }
                client().prepareIndex("test", "type", "" + termUpto).setSource("field" + (i % 10), sb.toString()).get();
            }
            refresh();
            nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true).get();
            for (NodeStats stats : nodesStats.getNodes()) {
                long throttleMillis = stats.getIndices().getStore().getThrottleTime().getMillis();
                if (throttleMillis > 0) {
                    done = true;
                    break;
                }
            }
        }

        logger.info("test: disable merge throttling");

        // Now updates settings to disable merge throttling
        client()
                .admin()
                .indices()
                .prepareUpdateSettings("test")
                .setSettings(ImmutableSettings.builder()
                        .put(AbstractIndexStore.INDEX_STORE_THROTTLE_TYPE, "none"))
                .get();

        // Optimize does a waitForMerges, which we must do to make sure all in-flight (throttled) merges finish:
        logger.info("test: optimize");
        client().admin().indices().prepareOptimize("test").setMaxNumSegments(1).get();
        logger.info("test: optimize done");

        // Record current throttling so far
        long sumThrottleTime = 0;
        nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true).get();
        for (NodeStats stats : nodesStats.getNodes()) {
            sumThrottleTime += stats.getIndices().getStore().getThrottleTime().getMillis();
        }

        // Make sure no further throttling happens:
        for (int i = 0; i < 100; i++) {
            // Provoke slowish merging by making many unique terms:
            StringBuilder sb = new StringBuilder();
            for (int j = 0; j < 100; j++) {
                sb.append(' ');
                sb.append(termUpto++);
            }
            client().prepareIndex("test", "type", "" + termUpto).setSource("field" + (i % 10), sb.toString()).get();
            if (i % 2 == 0) {
                refresh();
            }
        }
        logger.info("test: done indexing after disabling throttling");

        long newSumThrottleTime = 0;
        nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true).get();
        for (NodeStats stats : nodesStats.getNodes()) {
            newSumThrottleTime += stats.getIndices().getStore().getThrottleTime().getMillis();
        }

        // No additional merge IO throttling should have happened:
        assertEquals(sumThrottleTime, newSumThrottleTime);

        // Optimize & flush and wait; else we sometimes get a "Delete Index failed - not acked"
        // when ElasticsearchIntegrationTest.after tries to remove indices created by the test:

        // Wait for merges to finish
        client().admin().indices().prepareOptimize("test").get();
        flush();

        logger.info("test: test done");
    }

    /**
     * Log4j appender that watches engine/merge-scheduler log output so the tests
     * can verify that a dynamic setting change was actually observed by the node.
     * NOTE(review): sawIndexWriterMessage and sawFlushDeletes/sawMergeThreadPaused
     * are written but not asserted by the tests in this file.
     */
    private static class MockAppender extends AppenderSkeleton {
        public boolean sawIndexWriterMessage;
        public boolean sawFlushDeletes;
        public boolean sawMergeThreadPaused;
        public boolean sawUpdateMaxThreadCount;
        public boolean sawUpdateAutoThrottle;

        @Override
        protected void append(LoggingEvent event) {
            String message = event.getMessage().toString();
            if (event.getLevel() == Level.TRACE &&
                    event.getLoggerName().endsWith("lucene.iw")) {
                sawFlushDeletes |= message.contains("IW: apply all deletes during flush");
                sawMergeThreadPaused |= message.contains("CMS: pause thread");
            }
            if (event.getLevel() == Level.INFO && message.contains("updating [index.merge.scheduler.max_thread_count] from [10000] to [1]")) {
                sawUpdateMaxThreadCount = true;
            }
            if (event.getLevel() == Level.INFO && message.contains("updating [index.merge.scheduler.auto_throttle] from [true] to [false]")) {
                sawUpdateAutoThrottle = true;
            }
        }

        @Override
        public boolean requiresLayout() {
            return false;
        }

        @Override
        public void close() {
        }
    }

    /**
     * index.merge.scheduler.auto_throttle can be disabled live; verified both via
     * the node's log output (MockAppender) and the get-settings API.
     */
    @Test
    public void testUpdateAutoThrottleSettings() {

        MockAppender mockAppender = new MockAppender();
        Logger rootLogger = Logger.getRootLogger();
        Level savedLevel = rootLogger.getLevel();
        rootLogger.addAppender(mockAppender);
        rootLogger.setLevel(Level.TRACE);

        try {
            // No throttling at first, only 1 non-replicated shard, force lots of merging:
            assertAcked(prepareCreate("test")
                    .setSettings(ImmutableSettings.builder()
                            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1")
                            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0")
                            .put(TieredMergePolicyProvider.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE, "2")
                            .put(TieredMergePolicyProvider.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, "2")
                            .put(ConcurrentMergeSchedulerProvider.MAX_THREAD_COUNT, "1")
                            .put(ConcurrentMergeSchedulerProvider.MAX_MERGE_COUNT, "2")
                            .put(ConcurrentMergeSchedulerProvider.AUTO_THROTTLE, "true")
                    ));

            // Disable auto throttle:
            client()
                    .admin()
                    .indices()
                    .prepareUpdateSettings("test")
                    .setSettings(ImmutableSettings.builder()
                            .put(ConcurrentMergeSchedulerProvider.AUTO_THROTTLE, "no"))
                    .get();

            // Make sure we log the change:
            assertTrue(mockAppender.sawUpdateAutoThrottle);

            // Make sure setting says it is in fact changed:
            GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings("test").get();
            assertThat(getSettingsResponse.getSetting("test", ConcurrentMergeSchedulerProvider.AUTO_THROTTLE), equalTo("no"));
        } finally {
            // Always restore the root logger, even on assertion failure.
            rootLogger.removeAppender(mockAppender);
            rootLogger.setLevel(savedLevel);
        }
    }

    // #6882: make sure we can change index.merge.scheduler.max_thread_count live
    @Test
    public void testUpdateMergeMaxThreadCount() {

        MockAppender mockAppender = new MockAppender();
        Logger rootLogger = Logger.getRootLogger();
        Level savedLevel = rootLogger.getLevel();
        rootLogger.addAppender(mockAppender);
        rootLogger.setLevel(Level.TRACE);

        try {

            assertAcked(prepareCreate("test")
                    .setSettings(ImmutableSettings.builder()
                            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1")
                            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0")
                            .put(TieredMergePolicyProvider.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE, "2")
                            .put(TieredMergePolicyProvider.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, "2")
                            .put(MergeSchedulerModule.MERGE_SCHEDULER_TYPE_KEY, ConcurrentMergeSchedulerProvider.class)
                            .put(ConcurrentMergeSchedulerProvider.MAX_THREAD_COUNT, "10000")
                            .put(ConcurrentMergeSchedulerProvider.MAX_MERGE_COUNT, "10000")
                    ));

            assertFalse(mockAppender.sawUpdateMaxThreadCount);

            // Now make a live change to reduce allowed merge threads:
            client()
                    .admin()
                    .indices()
                    .prepareUpdateSettings("test")
                    .setSettings(ImmutableSettings.builder()
                            .put(ConcurrentMergeSchedulerProvider.MAX_THREAD_COUNT, "1")
                    )
                    .get();

            // Make sure we log the change:
            assertTrue(mockAppender.sawUpdateMaxThreadCount);

            // Make sure setting says it is in fact changed:
            GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings("test").get();
            assertThat(getSettingsResponse.getSetting("test", ConcurrentMergeSchedulerProvider.MAX_THREAD_COUNT), equalTo("1"));

        } finally {
            // Always restore the root logger, even on assertion failure.
            rootLogger.removeAppender(mockAppender);
            rootLogger.setLevel(savedLevel);
        }
    }
}
package org.glob3.mobile.generated;

// NOTE: this file is machine-generated from the C++ sources below; edit the
// C++ originals rather than this translation.
//
// HUDImageRenderer.cpp
// G3MiOSSDK
//
// Created by Diego Gomez Deck on 9/27/13.
//
//

//
// HUDImageRenderer.hpp
// G3MiOSSDK
//
// Created by Diego Gomez Deck on 9/27/13.
//
//

//class Mesh;
//class ICanvas;

/**
 * Renders a single full-viewport textured quad (HUD overlay) whose texture is
 * produced asynchronously by an {@link ImageFactory}. The image/mesh pair is
 * rebuilt whenever the viewport is resized or {@link #recreateImage()} is called.
 */
public class HUDImageRenderer extends DefaultRenderer {

  /** Strategy that asynchronously produces the HUD image for a given viewport size. */
  public interface ImageFactory {
    public void dispose();

    // Asynchronously creates a width x height image and delivers it to listener.
    void create(G3MRenderContext rc, int width, int height, IImageListener listener, boolean deleteListener);
  }

  /**
   * ImageFactory helper that draws onto a platform canvas; subclasses implement
   * {@link #drawOn} with the actual drawing code.
   */
  public abstract static class CanvasImageFactory implements HUDImageRenderer.ImageFactory {
    protected abstract void drawOn(ICanvas canvas, int width, int height);

    public CanvasImageFactory() {
    }

    public final void create(G3MRenderContext rc, int width, int height, IImageListener listener, boolean deleteListener) {
      ICanvas canvas = rc.getFactory().createCanvas();
      canvas.initialize(width, height);

      drawOn(canvas, width, height);

      canvas.createImage(listener, deleteListener);

      // Generated from C++ "delete canvas"; the null check is redundant here.
      if (canvas != null)
         canvas.dispose();
    }
  }

  // Monotonic counter used to give each renderer instance a unique texture-name prefix.
  private static long INSTANCE_COUNTER = 0;
  private long _instanceID;
  // Bumped on every texture upload so successive texture names never collide.
  private long _changeCounter;

  /** Receives the asynchronously created image and hands it to the renderer. */
  private static class ImageListener extends IImageListener {
    private HUDImageRenderer _hudImageRenderer;

    public ImageListener(HUDImageRenderer hudImageRenderer) {
       _hudImageRenderer = hudImageRenderer;
    }

    public final void imageCreated(IImage image) {
      _hudImageRenderer.setImage(image);
    }
  }

  private GLState _glState;
  private Mesh _mesh;
  private HUDImageRenderer.ImageFactory _imageFactory;
  // True while an async image creation request is in flight (guards re-entry).
  private boolean _creatingMesh;

  // Lazily builds the mesh: first call kicks off async image creation; once the
  // image has arrived (setImage), a later call builds and caches the mesh.
  private Mesh getMesh(G3MRenderContext rc) {
    if (_mesh == null) {
      if (!_creatingMesh) {
        if (_image == null) {
          _creatingMesh = true;
          final Camera camera = rc.getCurrentCamera();
          final int width = camera.getViewPortWidth();
          final int height = camera.getViewPortHeight();
          _imageFactory.create(rc, width, height, new HUDImageRenderer.ImageListener(this), true);
        }
      }
      if (_image != null) {
        _mesh = createMesh(rc);
      }
    }
    return _mesh;
  }

  // Uploads _image as a texture and builds a viewport-sized textured quad
  // (triangle strip) centered at the origin. Returns null if the upload fails.
  private Mesh createMesh(G3MRenderContext rc) {
    _creatingMesh = false;

    if (_mesh != null) {
      if (_mesh != null)
         _mesh.dispose();
      _mesh = null;
    }

    final IStringUtils su = IStringUtils.instance();
    final String textureName = "HUDImageRenderer" + su.toString(_instanceID) + "/" + su.toString(_changeCounter++);

    final TextureIDReference texId = rc.getTexturesHandler().getTextureIDReference(_image, GLFormat.rgba(), textureName, false);

    // Generated from C++ "delete _image; _image = NULL;" — hence the duplicate.
    _image = null;
    _image = null;
    if (texId == null) {
      rc.getLogger().logError("Can't upload texture to GPU");
      return null;
    }

    final Camera camera = rc.getCurrentCamera();
    final double halfWidth = camera.getViewPortWidth() / 2.0;
    final double halfHeight = camera.getViewPortHeight() / 2.0;
    FloatBufferBuilderFromCartesian3D vertices = FloatBufferBuilderFromCartesian3D.builderWithoutCenter();
    vertices.add(-halfWidth, halfHeight, 0);
    vertices.add(-halfWidth, -halfHeight, 0);
    vertices.add(halfWidth, halfHeight, 0);
    vertices.add(halfWidth, -halfHeight, 0);

    DirectMesh mesh = new DirectMesh(GLPrimitive.triangleStrip(), true, vertices.getCenter(), vertices.create(), 1, 1);
    if (vertices != null)
       vertices.dispose();

    FloatBufferBuilderFromCartesian2D texCoords = new FloatBufferBuilderFromCartesian2D();
    texCoords.add(0, 0);
    texCoords.add(0, 1);
    texCoords.add(1, 0);
    texCoords.add(1, 1);

    TextureMapping textureMapping = new SimpleTextureMapping(texId, texCoords.create(), true, true);

    return new TexturedMesh(mesh, true, textureMapping, true, true);
  }

  // The pending HUD image; non-null only between async creation and mesh build.
  private IImage _image;

  // Callback target for ImageListener; the mesh is built lazily on the next getMesh().
  private void setImage(IImage image) {
    _image = image;
  }


  public HUDImageRenderer(HUDImageRenderer.ImageFactory imageFactory) {
     _imageFactory = imageFactory;
     _glState = new GLState();
     _creatingMesh = false;
     _image = null;
     _mesh = null;
     _instanceID = INSTANCE_COUNTER++;
     _changeCounter = 0;
  }

  public final void initialize(G3MContext context) {
  }

  // NOTE(review): the passed glState is ignored; rendering always uses the
  // renderer's own _glState (which carries the orthographic projection).
  public final void render(G3MRenderContext rc, GLState glState) {
    Mesh mesh = getMesh(rc);
    if (mesh != null) {
      mesh.render(rc, _glState);
    }
  }

  // Rebuilds the orthographic projection for the new viewport and forces the
  // HUD image/mesh to be recreated at the new size.
  public final void onResizeViewportEvent(G3MEventContext ec, int width, int height) {
    final int halfWidth = width / 2;
    final int halfHeight = height / 2;
    // Near/far planes are +/- halfWidth — presumably intentional for this HUD
    // ortho setup; taken verbatim from the generated source.
    MutableMatrix44D projectionMatrix = MutableMatrix44D.createOrthographicProjectionMatrix(-halfWidth, halfWidth, -halfHeight, halfHeight, -halfWidth, halfWidth);

    ProjectionGLFeature pr = (ProjectionGLFeature) _glState.getGLFeature(GLFeatureID.GLF_PROJECTION);
    if (pr == null) {
      _glState.addGLFeature(new ProjectionGLFeature(projectionMatrix.asMatrix44D()), false);
    }
    else {
      pr.setMatrix(projectionMatrix.asMatrix44D());
    }

    recreateImage();
  }

  public void dispose() {
    _glState._release();
    if (_mesh != null)
       _mesh.dispose();
    _image = null;
    if (_imageFactory != null)
       _imageFactory.dispose();

    super.dispose();
  }

  // Drops the current mesh/image so the next render() regenerates both.
  public final void recreateImage() {
    _creatingMesh = false;

    if (_mesh != null)
       _mesh.dispose();
    _mesh = null;

    // Generated from C++ "delete _image; _image = NULL;" — hence the duplicate.
    _image = null;
    _image = null;
  }

  public final void stop(G3MRenderContext rc) {
    recreateImage();
  }

  public final HUDImageRenderer.ImageFactory getImageFactory() {
    return _imageFactory;
  }
}
/** * Copyright (c) 2012-2013 Reficio (TM) - Reestablish your software!. All Rights Reserved. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, * MA 02110-1301 USA */ package com.ibm.soatf.component.soap.builder; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import javax.wsdl.*; import javax.wsdl.extensions.ExtensibilityElement; import javax.wsdl.extensions.mime.MIMEContent; import javax.wsdl.extensions.mime.MIMEMultipartRelated; import javax.wsdl.extensions.mime.MIMEPart; import javax.wsdl.extensions.soap.*; import javax.wsdl.extensions.soap12.*; import javax.xml.XMLConstants; import javax.xml.namespace.QName; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; /** * This class was extracted from the soapUI code base by centeractive ag in October 2011. * The main reason behind the extraction was to separate the code that is responsible * for the generation of the SOAP messages from the rest of the soapUI's code that is * tightly coupled with other modules, such as soapUI's graphical user interface, etc. 
* The goal was to create an open-source java project whose main responsibility is to * handle SOAP message generation and SOAP transmission purely on an XML level. * <br/> * centeractive ag would like to express strong appreciation to SmartBear Software and * to the whole team of soapUI's developers for creating soapUI and for releasing its * source code under a free and open-source licence. centeractive ag extracted and * modifies some parts of the soapUI's code in good faith, making every effort not * to impair any existing functionality and to supplement it according to our * requirements, applying best practices of software design. * * Changes done: * - changing location in the package structure * - removal of dependencies and code parts that are out of scope of SOAP message generation * - minor fixes to make the class compile out of soapUI's code base */ /** * Wsdl-related tools * * @author Ole.Matzura */ @SuppressWarnings("unchecked") public class WsdlUtils { private final static Logger log = LogManager.getLogger(WsdlUtils.class); public static <T extends ExtensibilityElement> T getExtensiblityElement(List<?> list, Class<T> clazz) { List<T> elements = getExtensiblityElements(list, clazz); return elements.isEmpty() ? 
null : elements.get(0); } public static <T extends ExtensibilityElement> List<T> getExtensiblityElements(List list, Class<T> clazz) { List<T> result = new ArrayList<T>(); for (Iterator<T> i = list.iterator(); i.hasNext(); ) { T elm = i.next(); if (clazz.isAssignableFrom(elm.getClass())) { result.add(elm); } } return result; } public static Binding findBindingForOperation(Definition definition, BindingOperation bindingOperation) { Map services = definition.getAllServices(); Iterator<Service> s = services.values().iterator(); while (s.hasNext()) { Map ports = s.next().getPorts(); Iterator<Port> p = ports.values().iterator(); while (p.hasNext()) { Binding binding = p.next().getBinding(); List bindingOperations = binding.getBindingOperations(); for (Iterator iter = bindingOperations.iterator(); iter.hasNext(); ) { BindingOperation op = (BindingOperation) iter.next(); if (op.getName().equals(bindingOperation.getName())) return binding; } } } Map bindings = definition.getAllBindings(); Iterator<QName> names = bindings.keySet().iterator(); while (names.hasNext()) { Binding binding = definition.getBinding(names.next()); List bindingOperations = binding.getBindingOperations(); for (Iterator iter = bindingOperations.iterator(); iter.hasNext(); ) { BindingOperation op = (BindingOperation) iter.next(); if (op.getName().equals(bindingOperation.getName())) return binding; } } return null; } public static boolean isInputSoapEncoded(BindingOperation bindingOperation) { if (bindingOperation == null) return false; BindingInput bindingInput = bindingOperation.getBindingInput(); if (bindingInput == null) return false; SOAPBody soapBody = WsdlUtils.getExtensiblityElement(bindingInput.getExtensibilityElements(), SOAPBody.class); if (soapBody != null) { return soapBody.getUse() != null && soapBody.getUse().equalsIgnoreCase("encoded") && (soapBody.getEncodingStyles() == null || soapBody.getEncodingStyles().contains( "http://schemas.xmlsoap.org/soap/encoding/")); } SOAP12Body soap12Body = 
WsdlUtils.getExtensiblityElement(bindingInput.getExtensibilityElements(), SOAP12Body.class); if (soap12Body != null) { return soap12Body.getUse() != null && soap12Body.getUse().equalsIgnoreCase("encoded") && (soap12Body.getEncodingStyle() == null || soap12Body.getEncodingStyle().equals( "http://www.w3.org/2001/12/soap-encoding")); } return false; } public static boolean isOutputSoapEncoded(BindingOperation bindingOperation) { if (bindingOperation == null) return false; BindingOutput bindingOutput = bindingOperation.getBindingOutput(); if (bindingOutput == null) return false; SOAPBody soapBody = WsdlUtils.getExtensiblityElement(bindingOutput.getExtensibilityElements(), SOAPBody.class); if (soapBody != null) { return soapBody.getUse() != null && soapBody.getUse().equalsIgnoreCase("encoded") && (soapBody.getEncodingStyles() == null || soapBody.getEncodingStyles().contains( "http://schemas.xmlsoap.org/soap/encoding/")); } SOAP12Body soap12Body = WsdlUtils.getExtensiblityElement(bindingOutput.getExtensibilityElements(), SOAP12Body.class); if (soap12Body != null) { return soap12Body.getUse() != null && soap12Body.getUse().equalsIgnoreCase("encoded") && (soap12Body.getEncodingStyle() == null || soap12Body.getEncodingStyle().equals( "http://schemas.xmlsoap.org/soap/encoding/")); } return false; } public static boolean isRpc(Definition definition, BindingOperation bindingOperation) { SOAPOperation soapOperation = WsdlUtils.getExtensiblityElement(bindingOperation.getExtensibilityElements(), SOAPOperation.class); if (soapOperation != null && soapOperation.getStyle() != null) return soapOperation.getStyle().equalsIgnoreCase("rpc"); SOAP12Operation soap12Operation = WsdlUtils.getExtensiblityElement(bindingOperation.getExtensibilityElements(), SOAP12Operation.class); if (soap12Operation != null && soap12Operation.getStyle() != null) return soap12Operation.getStyle().equalsIgnoreCase("rpc"); Binding binding = findBindingForOperation(definition, bindingOperation); if (binding == 
null) { log.error("Failed to find binding for operation [" + bindingOperation.getName() + "] in definition [" + definition.getDocumentBaseURI() + "]"); return false; } return isRpc(binding); } public static boolean isRpc(Binding binding) { SOAPBinding soapBinding = WsdlUtils .getExtensiblityElement(binding.getExtensibilityElements(), SOAPBinding.class); if (soapBinding != null) return "rpc".equalsIgnoreCase(soapBinding.getStyle()); SOAP12Binding soap12Binding = WsdlUtils.getExtensiblityElement(binding.getExtensibilityElements(), SOAP12Binding.class); if (soap12Binding != null) return "rpc".equalsIgnoreCase(soap12Binding.getStyle()); return false; } public static boolean isOneWay(BindingOperation operation) { return operation.getOperation().getStyle().equals(OperationType.ONE_WAY); } /** * Returns a list of parts for the specifed operation, either as specified in * body or all */ public static Part[] getInputParts(BindingOperation operation) { List<Part> result = new ArrayList<Part>(); Input input = operation.getOperation().getInput(); if (input == null || operation.getBindingInput() == null) return new Part[0]; Message msg = input.getMessage(); if (msg != null) { SOAPBody soapBody = WsdlUtils.getExtensiblityElement(operation.getBindingInput().getExtensibilityElements(), SOAPBody.class); if (soapBody == null || soapBody.getParts() == null) { SOAP12Body soap12Body = WsdlUtils.getExtensiblityElement(operation.getBindingInput() .getExtensibilityElements(), SOAP12Body.class); if (soap12Body == null || soap12Body.getParts() == null) { if (msg != null) result.addAll(msg.getOrderedParts(null)); } else { Iterator i = soap12Body.getParts().iterator(); while (i.hasNext()) { String partName = (String) i.next(); Part part = msg.getPart(partName); result.add(part); } } } else { Iterator i = soapBody.getParts().iterator(); while (i.hasNext()) { String partName = (String) i.next(); Part part = msg.getPart(partName); result.add(part); } } } else { } return result.toArray(new 
Part[result.size()]);
    }

    /** Returns true when the given input part is bound as a MIME attachment. */
    public static boolean isAttachmentInputPart(Part part, BindingOperation operation) {
        return getInputMultipartContent(part, operation).length > 0;
    }

    /** Returns true when the given output part is bound as a MIME attachment. */
    public static boolean isAttachmentOutputPart(Part part, BindingOperation operation) {
        return getOutputMultipartContent(part, operation).length > 0;
    }

    /** Returns the mime:content elements bound to the given part in the operation output. */
    public static MIMEContent[] getOutputMultipartContent(Part part, BindingOperation operation) {
        BindingOutput output = operation.getBindingOutput();
        if (output == null)
            return new MIMEContent[0];

        MIMEMultipartRelated multipartOutput = WsdlUtils.getExtensiblityElement(output.getExtensibilityElements(),
                MIMEMultipartRelated.class);

        return getContentParts(part, multipartOutput);
    }

    /** Returns the mime:content elements bound to the given part in the operation input. */
    public static MIMEContent[] getInputMultipartContent(Part part, BindingOperation operation) {
        BindingInput bindingInput = operation.getBindingInput();
        if (bindingInput == null)
            return new MIMEContent[0];

        MIMEMultipartRelated multipartInput = WsdlUtils.getExtensiblityElement(bindingInput.getExtensibilityElements(),
                MIMEMultipartRelated.class);

        return getContentParts(part, multipartInput);
    }

    /**
     * Collects all mime:content elements of {@code multipart} whose part attribute names
     * {@code part}. Returns an empty array when {@code multipart} is null.
     */
    public static MIMEContent[] getContentParts(Part part, MIMEMultipartRelated multipart) {
        List<MIMEContent> result = new ArrayList<MIMEContent>();

        if (multipart != null) {
            List<MIMEPart> parts = multipart.getMIMEParts();

            for (int c = 0; c < parts.size(); c++) {
                List<MIMEContent> contentParts = WsdlUtils.getExtensiblityElements(parts.get(c)
                        .getExtensibilityElements(), MIMEContent.class);

                for (MIMEContent content : contentParts) {
                    if (content.getPart().equals(part.getName()))
                        result.add(content);
                }
            }
        }

        return result.toArray(new MIMEContent[result.size()]);
    }

    /**
     * Resolves the message parts of the named fault of the binding operation. Consults the
     * soap:fault name (SOAP 1.1, then 1.2) before falling back to the abstract operation fault.
     *
     * @throws Exception when a soap:fault (1.1) names a fault missing from the operation
     */
    public static Part[] getFaultParts(BindingOperation bindingOperation, String faultName) throws Exception {
        List<Part> result = new ArrayList<Part>();

        BindingFault bindingFault = bindingOperation.getBindingFault(faultName);
        SOAPFault soapFault = WsdlUtils.getExtensiblityElement(bindingFault.getExtensibilityElements(), SOAPFault.class);
Operation operation = bindingOperation.getOperation();
        if (soapFault != null && soapFault.getName() != null) {
            Fault fault = operation.getFault(soapFault.getName());
            if (fault == null)
                throw new Exception("Missing Fault [" + soapFault.getName() + "] in operation ["
                        + operation.getName() + "]");
            result.addAll(fault.getMessage().getOrderedParts(null));
        } else {
            SOAP12Fault soap12Fault = WsdlUtils.getExtensiblityElement(bindingFault.getExtensibilityElements(),
                    SOAP12Fault.class);
            if (soap12Fault != null && soap12Fault.getName() != null) {
                // NOTE(review): unlike the SOAP 1.1 branch above, a fault missing from the
                // operation is silently ignored here.
                Fault fault = operation.getFault(soap12Fault.getName());
                if (fault != null && fault.getMessage() != null)
                    result.addAll(fault.getMessage().getOrderedParts(null));
            } else {
                Fault fault = operation.getFault(faultName);
                if (fault != null && fault.getMessage() != null)
                    result.addAll(fault.getMessage().getOrderedParts(null));
            }
        }

        return result.toArray(new Part[result.size()]);
    }

    /**
     * Returns the parts of the operation output, honouring an explicit part list on the
     * soap:body / soap12:body extension when present, otherwise all ordered parts of the
     * output message.
     */
    public static Part[] getOutputParts(BindingOperation operation) {
        BindingOutput bindingOutput = operation.getBindingOutput();
        if (bindingOutput == null)
            return new Part[0];

        List<Part> result = new ArrayList<Part>();
        Output output = operation.getOperation().getOutput();
        if (output == null)
            return new Part[0];

        Message msg = output.getMessage();
        if (msg != null) {
            SOAPBody soapBody = WsdlUtils
                    .getExtensiblityElement(bindingOutput.getExtensibilityElements(), SOAPBody.class);
            if (soapBody == null || soapBody.getParts() == null) {
                SOAP12Body soap12Body = WsdlUtils.getExtensiblityElement(bindingOutput.getExtensibilityElements(),
                        SOAP12Body.class);
                if (soap12Body == null || soap12Body.getParts() == null) {
                    // No explicit part list -> all ordered parts of the output message.
                    result.addAll(msg.getOrderedParts(null));
                } else {
                    Iterator i = soap12Body.getParts().iterator();
                    while (i.hasNext()) {
                        String partName = (String) i.next();
                        Part part = msg.getPart(partName);
                        result.add(part);
                    }
                }
            } else {
                Iterator i = soapBody.getParts().iterator();
                while (i.hasNext()) {
                    String partName = (String) i.next();
                    Part part = msg.getPart(partName);
result.add(part); } } } else { log.warn("Missing output message for binding operation [" + operation.getName() + "]"); } return result.toArray(new Part[result.size()]); } public static String getSoapEndpoint(Port port) { SOAPAddress soapAddress = WsdlUtils.getExtensiblityElement(port.getExtensibilityElements(), SOAPAddress.class); if (soapAddress != null && StringUtils.isNotBlank(soapAddress.getLocationURI())) { try { return URLDecoder.decode(soapAddress.getLocationURI(), "UTF-8"); } catch (UnsupportedEncodingException e) { e.printStackTrace(); return soapAddress.getLocationURI(); } } SOAP12Address soap12Address = WsdlUtils.getExtensiblityElement(port.getExtensibilityElements(), SOAP12Address.class); if (soap12Address != null && StringUtils.isNotBlank(soap12Address.getLocationURI())) { try { return URLDecoder.decode(soap12Address.getLocationURI(), "UTF-8"); } catch (UnsupportedEncodingException e) { e.printStackTrace(); return soap12Address.getLocationURI(); } } return null; } public static boolean replaceSoapEndpoint(Port port, String endpoint) { SOAPAddress soapAddress = WsdlUtils.getExtensiblityElement(port.getExtensibilityElements(), SOAPAddress.class); if (soapAddress != null) { soapAddress.setLocationURI(endpoint); return true; } SOAP12Address soap12Address = WsdlUtils.getExtensiblityElement(port.getExtensibilityElements(), SOAP12Address.class); if (soap12Address != null) { soap12Address.setLocationURI(endpoint); return true; } return false; } public static String getSoapBodyNamespace(List<?> list) { SOAPBody soapBody = WsdlUtils.getExtensiblityElement(list, SOAPBody.class); if (soapBody != null) return soapBody.getNamespaceURI(); SOAP12Body soap12Body = WsdlUtils.getExtensiblityElement(list, SOAP12Body.class); if (soap12Body != null) return soap12Body.getNamespaceURI(); return null; } /** * A SOAP-Header wrapper * * @author ole.matzura */ public interface SoapHeader { public QName getMessage(); public String getPart(); } /** * SOAP 1.1 Header implementation 
*
 * @author ole.matzura
 */
    public static class Soap11Header implements SoapHeader {
        private final SOAPHeader soapHeader;

        public Soap11Header(SOAPHeader soapHeader) {
            this.soapHeader = soapHeader;
        }

        public QName getMessage() {
            return soapHeader.getMessage();
        }

        public String getPart() {
            return soapHeader.getPart();
        }
    }

    /**
     * SOAP 1.2 Header implementation
     *
     * @author ole.matzura
     */
    public static class Soap12Header implements SoapHeader {
        private final SOAP12Header soapHeader;

        public Soap12Header(SOAP12Header soapHeader) {
            this.soapHeader = soapHeader;
        }

        public QName getMessage() {
            return soapHeader.getMessage();
        }

        public String getPart() {
            return soapHeader.getPart();
        }
    }

    /**
     * Wraps all soap:header (1.1) elements in the list; when none are present, wraps the
     * soap12:header elements instead. Never returns null.
     */
    public static List<SoapHeader> getSoapHeaders(List list) {
        List<SoapHeader> result = new ArrayList<SoapHeader>();

        List<SOAPHeader> soapHeaders = WsdlUtils.getExtensiblityElements(list, SOAPHeader.class);
        if (soapHeaders != null && !soapHeaders.isEmpty()) {
            for (SOAPHeader header : soapHeaders)
                result.add(new Soap11Header(header));
        } else {
            List<SOAP12Header> soap12Headers = WsdlUtils.getExtensiblityElements(list, SOAP12Header.class);
            if (soap12Headers != null && !soap12Headers.isEmpty()) {
                for (SOAP12Header header : soap12Headers)
                    result.add(new Soap12Header(header));
            }
        }

        return result;
    }

    /**
     * Looks up a binding operation by name and optional input/output names; null
     * input/output names are normalized to ":none", with a null-based retry for
     * WSDL4J 1.5.0 compatibility.
     */
    public static BindingOperation findBindingOperation(Binding binding, String bindingOperationName, String inputName,
                                                       String outputName) {
        if (binding == null)
            return null;
        if (inputName == null)
            inputName = ":none";
        if (outputName == null)
            outputName = ":none";

        BindingOperation result = binding.getBindingOperation(bindingOperationName, inputName, outputName);
        if (result == null && (inputName.equals(":none") || outputName.equals(":none"))) {
            // fall back to this behaviour for WSDL4j 1.5.0 compatibility
            result = binding.getBindingOperation(bindingOperationName,
                    inputName.equals(":none") ? null : inputName,
                    outputName.equals(":none") ?
null : outputName);
        }

        return result;
    }

    /**
     * Returns the target namespace of the definition, or {@link XMLConstants#NULL_NS_URI}
     * when none is declared.
     */
    public static String getTargetNamespace(Definition definition) {
        return definition.getTargetNamespace() == null ? XMLConstants.NULL_NS_URI : definition.getTargetNamespace();
    }
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.refactoring.util.duplicates;

import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInsight.PsiEquivalenceUtil;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.*;
import com.intellij.psi.controlFlow.*;
import com.intellij.psi.impl.source.PsiImmediateClassType;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.*;
import com.intellij.refactoring.extractMethod.InputVariables;
import com.intellij.refactoring.util.RefactoringChangeUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.containers.IntArrayList;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;

/**
 * Finds fragments of code that duplicate a given pattern of PSI elements
 * (see {@link #findDuplicates} / {@link #isDuplicate}).
 *
 * @author dsl
 */
public class DuplicatesFinder {
  private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.util.duplicates.DuplicatesFinder");
  // User-data key marking pattern references that act as parameters: variable + its type.
  public static final Key<Pair<PsiVariable, PsiType>> PARAMETER = Key.create("PARAMETER");
  // The sequence of sibling elements being searched for; never empty.
  @NotNull private final PsiElement[] myPattern;
  private final InputVariables myParameters;
  private final List<?
extends PsiVariable> myOutputParameters;
  private final List<PsiElement> myPatternAsList;
  // True when control-flow analysis found more than one exit point in the pattern.
  private boolean myMultipleExitPoints;
  @Nullable private final ReturnValue myReturnValue;
  private final boolean myWithExtractedParameters;
  private final Set<PsiVariable> myEffectivelyLocal;
  // Lazily initialized complexity caches (see matchExtractableExpression).
  private ComplexityHolder myPatternComplexityHolder;
  private ComplexityHolder myCandidateComplexityHolder;

  /**
   * @param pattern                 non-empty sequence of sibling elements to search for
   * @param parameters              input variables of the fragment
   * @param returnValue             value returned by the fragment, if any
   * @param outputParameters        variables whose values the fragment produces
   * @param withExtractedParameters whether sub-expressions may be folded into extra parameters
   * @param effectivelyLocal        variables treated as local to the fragment; null means none
   */
  public DuplicatesFinder(@NotNull PsiElement[] pattern,
                          InputVariables parameters,
                          @Nullable ReturnValue returnValue,
                          @NotNull List<? extends PsiVariable> outputParameters,
                          boolean withExtractedParameters,
                          @Nullable Set<PsiVariable> effectivelyLocal) {
    myReturnValue = returnValue;
    LOG.assertTrue(pattern.length > 0);
    myPattern = pattern;
    myPatternAsList = Arrays.asList(myPattern);
    myParameters = parameters;
    myOutputParameters = outputParameters;
    myWithExtractedParameters = withExtractedParameters;
    myEffectivelyLocal = effectivelyLocal != null ? effectivelyLocal : Collections.emptySet();

    final PsiElement codeFragment = ControlFlowUtil.findCodeFragment(pattern[0]);
    try {
      final ControlFlow controlFlow =
        ControlFlowFactory.getInstance(codeFragment.getProject())
          .getControlFlow(codeFragment, new LocalsControlFlowPolicy(codeFragment), false);

      // Determine the pattern's [start, end) span in the control flow, skipping
      // elements that have no control-flow offset.
      int startOffset;
      int i = 0;
      do {
        startOffset = controlFlow.getStartOffset(pattern[i++]);
      }
      while (startOffset < 0 && i < pattern.length);

      int endOffset;
      int j = pattern.length - 1;
      do {
        endOffset = controlFlow.getEndOffset(pattern[j--]);
      }
      while (endOffset < 0 && j >= 0);

      IntArrayList exitPoints = new IntArrayList();
      final Collection<PsiStatement> exitStatements = ControlFlowUtil
        .findExitPointsAndStatements(controlFlow, startOffset, endOffset, exitPoints,
                                     ControlFlowUtil.DEFAULT_EXIT_STATEMENTS_CLASSES);
      myMultipleExitPoints = exitPoints.size() > 1;

      if (myMultipleExitPoints) {
        myParameters.removeParametersUsedInExitsOnly(codeFragment, exitStatements, controlFlow, startOffset, endOffset);
      }
    }
    catch (AnalysisCanceledException e) {
      // Control-flow analysis was cancelled; proceed without exit-point information.
    }
  }

  public
DuplicatesFinder(@NotNull PsiElement[] pattern,
                 InputVariables parameters,
                 @Nullable ReturnValue returnValue,
                 @NotNull List<? extends PsiVariable> outputParameters) {
    this(pattern, parameters, returnValue, outputParameters, false, null);
  }

  public DuplicatesFinder(final PsiElement[] pattern,
                          final InputVariables psiParameters,
                          final List<? extends PsiVariable> psiVariables) {
    this(pattern, psiParameters, null, psiVariables);
  }

  public InputVariables getParameters() {
    return myParameters;
  }

  @NotNull
  public PsiElement[] getPattern() {
    return myPattern;
  }

  @Nullable
  public ReturnValue getReturnValue() {
    return myReturnValue;
  }

  /** Finds all duplicates of the pattern inside {@code scope}; the pattern itself is excluded. */
  public List<Match> findDuplicates(PsiElement scope) {
    annotatePattern();
    final ArrayList<Match> result = new ArrayList<>();
    findPatternOccurrences(result, scope);
    deannotatePattern();
    return result;
  }

  /** Checks whether {@code element} starts a duplicate of the pattern; null when it does not. */
  @Nullable
  public Match isDuplicate(@NotNull PsiElement element, boolean ignoreParameterTypesAndPostVariableUsages) {
    annotatePattern();
    Match match = isDuplicateFragment(element, ignoreParameterTypesAndPostVariableUsages);
    deannotatePattern();
    return match;
  }

  // Attaches PARAMETER user-data to every reference in the pattern that resolves to an
  // output parameter, and registers input-variable references with myParameters,
  // so matching can map them to candidate expressions later.
  private void annotatePattern() {
    for (final PsiElement patternComponent : myPattern) {
      patternComponent.accept(new JavaRecursiveElementWalkingVisitor() {
        @Override
        public void visitReferenceElement(PsiJavaCodeReferenceElement reference) {
          final PsiElement element = reference.resolve();
          if (element instanceof PsiVariable) {
            final PsiVariable variable = (PsiVariable)element;
            PsiType type = variable.getType();
            myParameters.annotateWithParameter(reference);
            if (myOutputParameters.contains(element)) {
              reference.putUserData(PARAMETER, Pair.create(variable, type));
            }
          }
          // Qualifiers are visited explicitly so nested references get annotated too.
          PsiElement qualifier = reference.getQualifier();
          if (qualifier != null) {
            qualifier.accept(this);
          }
        }
      });
    }
  }

  // Removes the PARAMETER user-data installed by annotatePattern().
  private void deannotatePattern() {
    for (final PsiElement patternComponent : myPattern) {
      patternComponent.accept(new JavaRecursiveElementWalkingVisitor() {
        @Override
        public void visitReferenceElement(PsiJavaCodeReferenceElement
reference) {
          if (reference.getUserData(PARAMETER) != null) {
            reference.putUserData(PARAMETER, null);
          }
        }
      });
    }
  }

  // Depth-first scan of scope: when a child starts a duplicate, the match is recorded and
  // the child's subtree is not descended into.
  private void findPatternOccurrences(List<Match> array, PsiElement scope) {
    PsiElement[] children = scope.getChildren();
    for (PsiElement child : children) {
      final Match match = isDuplicateFragment(child, false);
      if (match != null) {
        array.add(match);
        continue;
      }
      findPatternOccurrences(array, child);
    }
  }

  /**
   * Tries to match the pattern against the sibling sequence starting at {@code candidate};
   * returns the resulting Match, or null on any mismatch.
   */
  @Nullable
  private Match isDuplicateFragment(@NotNull PsiElement candidate, boolean ignoreParameterTypesAndPostVariableUsages) {
    if (isSelf(candidate)) return null;

    // Collect one candidate sibling per pattern element, skipping whitespace, comments and
    // empty statements, with a cheap structural pre-check on each pair.
    PsiElement sibling = candidate;
    ArrayList<PsiElement> candidates = new ArrayList<>();
    for (final PsiElement element : myPattern) {
      if (sibling == null) return null;
      if (!canBeEquivalent(element, sibling) || isSelf(sibling)) return null;
      candidates.add(sibling);
      sibling = PsiTreeUtil.skipSiblingsForward(sibling, PsiWhiteSpace.class, PsiComment.class, PsiEmptyStatement.class);
    }
    LOG.assertTrue(myPattern.length == candidates.size());

    // A single-expression pattern additionally requires type-compatible candidates that are
    // not written to.
    if (myPattern.length == 1 && myPattern[0] instanceof PsiExpression) {
      if (candidates.get(0) instanceof PsiExpression) {
        final PsiExpression candidateExpression = (PsiExpression)candidates.get(0);
        if (PsiUtil.isAccessedForWriting(candidateExpression)) return null;
        final PsiType patternType = ((PsiExpression)myPattern[0]).getType();
        final PsiType candidateType = candidateExpression.getType();
        PsiSubstitutor substitutor = PsiSubstitutor.EMPTY;
        final PsiMethod method = PsiTreeUtil.getContextOfType(myPattern[0], PsiMethod.class);
        if (method != null) {
          // Infer type arguments of the enclosing method so generic pattern types can
          // be compared against the candidate's concrete type.
          final PsiResolveHelper resolveHelper = JavaPsiFacade.getInstance(candidate.getProject()).getResolveHelper();
          substitutor = resolveHelper.inferTypeArguments(method.getTypeParameters(), new PsiType[]{patternType},
                                                         new PsiType[]{candidateType}, PsiUtil.getLanguageLevel(method));
        }
        if (!canTypesBeEquivalent(substitutor.substitute(patternType), candidateType)) return null;
      }
      else {
        return null;
      }
    }

    final Match match = new
Match(candidates.get(0), candidates.get(candidates.size() - 1), ignoreParameterTypesAndPostVariableUsages);
    for (int i = 0; i < myPattern.length; i++) {
      if (!matchPattern(myPattern[i], candidates.get(i), candidates, match)) return null;
    }

    if (!ignoreParameterTypesAndPostVariableUsages && checkPostVariableUsages(candidates, match)) return null;

    return match;
  }

  // True when candidate lies inside the pattern itself — the pattern must not match itself.
  protected boolean isSelf(@NotNull PsiElement candidate) {
    for (PsiElement pattern : myPattern) {
      if (PsiTreeUtil.isAncestor(pattern, candidate, false)) {
        return true;
      }
    }
    return false;
  }

  // Returns true when the candidate fragment produces output variables that the match's
  // (single) return value cannot reproduce — such a candidate must be rejected.
  private boolean checkPostVariableUsages(final ArrayList<PsiElement> candidates, final Match match) {
    final PsiElement codeFragment = ControlFlowUtil.findCodeFragment(candidates.get(0));
    try {
      final ControlFlow controlFlow =
        ControlFlowFactory.getInstance(codeFragment.getProject())
          .getControlFlow(codeFragment, new LocalsControlFlowPolicy(codeFragment), false);

      // Same offset-scanning scheme as in the constructor, but over the candidate elements.
      int startOffset;
      int i = 0;
      do {
        startOffset = controlFlow.getStartOffset(candidates.get(i++));
      }
      while (startOffset < 0 && i < candidates.size());

      int endOffset;
      int j = candidates.size() - 1;
      do {
        endOffset = controlFlow.getEndOffset(candidates.get(j--));
      }
      while (endOffset < 0 && j >= 0);

      final IntArrayList exitPoints = new IntArrayList();
      ControlFlowUtil.findExitPointsAndStatements(controlFlow, startOffset, endOffset, exitPoints,
                                                  ControlFlowUtil.DEFAULT_EXIT_STATEMENTS_CLASSES);
      final PsiVariable[] outVariables = ControlFlowUtil.getOutputVariables(controlFlow, startOffset, endOffset,
                                                                            exitPoints.toArray());

      if (outVariables.length > 0) {
        if (outVariables.length == 1) {
          // A single output variable is acceptable only when it corresponds to the
          // fragment's declared return value.
          ReturnValue returnValue = match.getReturnValue();
          if (returnValue == null) {
            returnValue = myReturnValue;
          }
          if (returnValue instanceof VariableReturnValue) {
            final ReturnValue value = match.getOutputVariableValue(((VariableReturnValue)returnValue).getVariable());
            if (value != null) {
              if (value.isEquivalent(new VariableReturnValue(outVariables[0]))) return false;
              if (value instanceof ExpressionReturnValue)
{
                final PsiExpression expression = ((ExpressionReturnValue)value).getExpression();
                if (expression instanceof PsiReferenceExpression) {
                  final PsiElement variable = ((PsiReferenceExpression)expression).resolve();
                  return variable == null || !PsiEquivalenceUtil.areElementsEquivalent(variable, outVariables[0]);
                }
              }
            }
          }
        }
        // Output variables that the return value cannot cover -> reject the candidate.
        return true;
      }
    }
    catch (AnalysisCanceledException e) {
      // Analysis cancelled: fall through and report no conflicting post-usages.
    }
    return false;
  }

  // Types can be equivalent when assignable, or when both are equivalent anonymous classes
  // over the same base class type.
  private static boolean canTypesBeEquivalent(PsiType type1, PsiType type2) {
    if (type1 == null || type2 == null) return false;
    if (!type2.isAssignableFrom(type1)) {
      if (type1 instanceof PsiImmediateClassType && type2 instanceof PsiImmediateClassType) {
        final PsiClass psiClass1 = ((PsiImmediateClassType)type1).resolve();
        final PsiClass psiClass2 = ((PsiImmediateClassType)type2).resolve();
        if (!(psiClass1 instanceof PsiAnonymousClass &&
              psiClass2 instanceof PsiAnonymousClass &&
              psiClass1.getManager().areElementsEquivalent(((PsiAnonymousClass)psiClass1).getBaseClassType().resolve(),
                                                           ((PsiAnonymousClass)psiClass2).getBaseClassType().resolve()))) {
          return false;
        }
      }
      else {
        return false;
      }
    }
    return true;
  }

  // Cheap structural pre-check before full matching: a few deliberate cross-type allowances,
  // then same AST element type, then same operation token for unary/polyadic expressions.
  private static boolean canBeEquivalent(final PsiElement pattern, PsiElement candidate) {
    if (pattern instanceof PsiReturnStatement && candidate instanceof PsiExpressionStatement) return true;
    if (pattern instanceof PsiReturnStatement && candidate instanceof PsiDeclarationStatement) return true;
    if (pattern instanceof PsiThisExpression && candidate instanceof PsiReferenceExpression) return true;
    final ASTNode node1 = pattern.getNode();
    final ASTNode node2 = candidate.getNode();
    if (node1 == null || node2 == null) return false;
    if (node1.getElementType() != node2.getElementType()) return false;
    if (pattern instanceof PsiUnaryExpression) {
      return ((PsiUnaryExpression)pattern).getOperationTokenType() ==
             ((PsiUnaryExpression)candidate).getOperationTokenType();
    }
    if (pattern instanceof PsiPolyadicExpression) {
      return ((PsiPolyadicExpression)pattern).getOperationTokenType() ==
((PsiPolyadicExpression)candidate).getOperationTokenType();
    }
    return true;
  }

  /**
   * Recursively matches one pattern element against one candidate element, recording
   * parameter substitutions, declaration correspondences and return values in {@code match}.
   * Returns false on the first structural or semantic mismatch.
   */
  private boolean matchPattern(PsiElement pattern, PsiElement candidate, List<PsiElement> candidates, Match match) {
    if (pattern == null || candidate == null) return pattern == candidate;

    // A reference annotated as a parameter matches any candidate expression, recorded
    // as that parameter's value.
    if (pattern.getUserData(PARAMETER) != null) {
      final Pair<PsiVariable, PsiType> parameter = pattern.getUserData(PARAMETER);
      if (!myWithExtractedParameters || parameter.second.equals(parameter.first.getType())) {
        return match.putParameter(parameter, candidate);
      }
    }

    Boolean matchedExtractablePart = matchExtractableExpression(pattern, candidate, candidates, match, false);
    if (matchedExtractablePart != null) return matchedExtractablePart;

    if (!canBeEquivalent(pattern, candidate)) return false; // Q : is it correct to check implementation classes?

    if (pattern instanceof PsiExpressionList && candidate instanceof PsiExpressionList) { //check varargs
      final PsiExpression[] expressions = ((PsiExpressionList)pattern).getExpressions();
      final PsiExpression[] childExpressions = ((PsiExpressionList)candidate).getExpressions();
      if (expressions.length > 0 && expressions[expressions.length - 1] instanceof PsiReferenceExpression) {
        final PsiElement resolved = ((PsiReferenceExpression)expressions[expressions.length - 1]).resolve();
        if (resolved instanceof PsiParameter && ((PsiParameter)resolved).getType() instanceof PsiEllipsisType) {
          // Fixed arguments are matched pairwise; the trailing vararg parameter absorbs
          // all remaining candidate arguments.
          for (int i = 0; i < expressions.length - 1; i++) {
            final Pair<PsiVariable, PsiType> parameter = expressions[i].getUserData(PARAMETER);
            if (parameter == null) {
              if (!matchPattern(expressions[i], childExpressions[i], candidates, match)) {
                return false;
              }
            }
            else if (!match.putParameter(parameter, childExpressions[i])) return false;
          }
          final Pair<PsiVariable, PsiType> param = expressions[expressions.length - 1].getUserData(PARAMETER);
          if (param == null) return false;
          for (int i = expressions.length - 1; i < childExpressions.length; i++) {
            if (!match.putParameter(param, childExpressions[i])) return false;
          }
          return true;
        }
      }
    }

    if (pattern instanceof PsiAssignmentExpression) {
      // Writing to a primitive parameter inside the fragment cannot be reproduced by the
      // extracted method (pass-by-value), so such patterns never match.
      final PsiExpression lExpression = PsiUtil.skipParenthesizedExprDown(((PsiAssignmentExpression)pattern).getLExpression());
      if (lExpression.getType() instanceof PsiPrimitiveType &&
          lExpression instanceof PsiReferenceExpression &&
          ((PsiReferenceExpression)lExpression).resolve() instanceof PsiParameter) {
        return false;
      }
    }
    else if (pattern instanceof PsiUnaryExpression) {
      if (checkParameterModification(((PsiUnaryExpression)pattern).getOperand(),
                                     ((PsiUnaryExpression)pattern).getOperationTokenType(),
                                     ((PsiUnaryExpression)candidate).getOperand())) return false;
    }

    if (pattern instanceof PsiJavaCodeReferenceElement) {
      final PsiElement resolveResult1 = ((PsiJavaCodeReferenceElement)pattern).resolve();
      final PsiElement resolveResult2 = ((PsiJavaCodeReferenceElement)candidate).resolve();
      if (resolveResult1 instanceof PsiClass && resolveResult2 instanceof PsiClass) return true;
      // Both references resolve to declarations inside their fragments: record the
      // correspondence instead of requiring identical targets.
      if (isUnder(resolveResult1, myPatternAsList) && isUnder(resolveResult2, candidates)) {
        traverseParameter(resolveResult1, resolveResult2, match);
        return match.putDeclarationCorrespondence(resolveResult1, resolveResult2);
      }
      if (resolveResult1 instanceof PsiVariable && myEffectivelyLocal.contains((PsiVariable)resolveResult1)) {
        return (resolveResult2 instanceof PsiLocalVariable || resolveResult2 instanceof PsiParameter) &&
               match.putDeclarationCorrespondence(resolveResult1, resolveResult2);
      }
      final PsiElement qualifier2 = ((PsiJavaCodeReferenceElement)candidate).getQualifier();
      if (!equivalentResolve(resolveResult1, resolveResult2, qualifier2)) {
        return matchExtractableVariable(pattern, candidate, match);
      }
      PsiElement qualifier1 = ((PsiJavaCodeReferenceElement)pattern).getQualifier();
      if (qualifier1 instanceof PsiReferenceExpression && qualifier2 instanceof PsiReferenceExpression &&
          !match.areCorrespond(((PsiReferenceExpression)qualifier1).resolve(),
                               ((PsiReferenceExpression)qualifier2).resolve())) {
        return false;
      }
      if (qualifier1 == null && qualifier2 == null) {
        // Same member referenced unqualified from different classes: reject when the
        // candidate class is not related to the pattern class but both inherit the member.
        final PsiClass patternClass = RefactoringChangeUtil.getThisClass(pattern);
        final PsiClass candidateClass = RefactoringChangeUtil.getThisClass(candidate);
        if (resolveResult1 == resolveResult2 && resolveResult1 instanceof PsiMember) {
          final PsiClass containingClass = ((PsiMember)resolveResult1).getContainingClass();
          if (!InheritanceUtil.isInheritorOrSelf(candidateClass, patternClass, true) &&
              InheritanceUtil.isInheritorOrSelf(candidateClass, containingClass, true) &&
              InheritanceUtil.isInheritorOrSelf(patternClass, containingClass, true)) {
            return false;
          }
        }
      }
    }

    if (pattern instanceof PsiTypeCastExpression) {
      final PsiTypeElement castTypeElement1 = ((PsiTypeCastExpression)pattern).getCastType();
      final PsiTypeElement castTypeElement2 = ((PsiTypeCastExpression)candidate).getCastType();
      if (castTypeElement1 != null && castTypeElement2 != null) {
        // Compare erasures so generic casts with equivalent raw types still match.
        final PsiType type1 = TypeConversionUtil.erasure(castTypeElement1.getType());
        final PsiType type2 = TypeConversionUtil.erasure(castTypeElement2.getType());
        if (!type1.equals(type2)) return false;
      }
    }
    else if (pattern instanceof PsiNewExpression) {
      final PsiType type1 = ((PsiNewExpression)pattern).getType();
      final PsiType type2 = ((PsiNewExpression)candidate).getType();
      if (type1 == null || type2 == null) return false;
      final PsiMethod constructor1 = ((PsiNewExpression)pattern).resolveConstructor();
      final PsiMethod constructor2 = ((PsiNewExpression)candidate).resolveConstructor();
      if (constructor1 != null && constructor2 != null) {
        if (!pattern.getManager().areElementsEquivalent(constructor1, constructor2)) return false;
      }
      else {
        if (!canTypesBeEquivalent(type1, type2)) return false;
      }
    }
    else if (pattern instanceof PsiClassObjectAccessExpression) {
      final PsiTypeElement operand1 = ((PsiClassObjectAccessExpression)pattern).getOperand();
      final PsiTypeElement operand2 = ((PsiClassObjectAccessExpression)candidate).getOperand();
      return operand1.getType().equals(operand2.getType());
    }
    else if (pattern instanceof PsiInstanceOfExpression) {
      final PsiTypeElement operand1 = ((PsiInstanceOfExpression)pattern).getCheckType();
      final PsiTypeElement operand2 = ((PsiInstanceOfExpression)candidate).getCheckType();
      if (operand1 == null || operand2 == null) return false;
      if (!operand1.getType().equals(operand2.getType())) return false;
    }
    else if (pattern instanceof PsiReturnStatement) {
      final PsiReturnStatement patternReturnStatement = (PsiReturnStatement)pattern;
      return matchReturnStatement(patternReturnStatement, candidate, candidates, match);
    }
    else if (pattern instanceof PsiContinueStatement) {
      match.registerReturnValue(new ContinueReturnValue());
    }
    else if (pattern instanceof PsiBreakStatement) {
      match.registerReturnValue(new BreakReturnValue());
    }
    else if (pattern instanceof PsiMethodCallExpression) {
      final PsiMethod patternMethod = ((PsiMethodCallExpression)pattern).resolveMethod();
      final PsiMethod candidateMethod = ((PsiMethodCallExpression)candidate).resolveMethod();
      if (patternMethod != null && candidateMethod != null) {
        if (!MethodSignatureUtil.areSignaturesEqual(patternMethod, candidateMethod)) return false;
      }
    }
    else if (pattern instanceof PsiReferenceExpression) {
      // Qualifier handling: implicit this, explicit this, class qualifiers and
      // parameter-valued qualifiers all need dedicated treatment.
      final PsiReferenceExpression patternRefExpr = (PsiReferenceExpression)pattern;
      final PsiReferenceExpression candidateRefExpr = (PsiReferenceExpression)candidate;
      final PsiExpression patternQualifier = patternRefExpr.getQualifierExpression();
      final PsiExpression candidateQualifier = candidateRefExpr.getQualifierExpression();
      if (patternQualifier == null) {
        PsiClass contextClass = PsiTreeUtil.getContextOfType(pattern, PsiClass.class);
        if (candidateQualifier instanceof PsiReferenceExpression) {
          final PsiElement resolved = ((PsiReferenceExpression)candidateQualifier).resolve();
          if (resolved instanceof PsiClass && contextClass != null &&
              InheritanceUtil.isInheritorOrSelf(contextClass, (PsiClass)resolved, true)) {
            return true;
          }
        }
        return contextClass != null && match.registerInstanceExpression(candidateQualifier, contextClass);
      }
      else {
        if (candidateQualifier == null) {
          if (patternQualifier instanceof PsiThisExpression) {
            final PsiJavaCodeReferenceElement qualifier = ((PsiThisExpression)patternQualifier).getQualifier();
            if (candidate instanceof PsiReferenceExpression) {
              PsiElement contextClass = qualifier == null ? PsiTreeUtil.getContextOfType(pattern, PsiClass.class)
                                                          : qualifier.resolve();
              return contextClass instanceof PsiClass &&
                     match.registerInstanceExpression(((PsiReferenceExpression)candidate).getQualifierExpression(),
                                                      (PsiClass)contextClass);
            }
          }
          else {
            final PsiType type = patternQualifier.getType();
            PsiClass contextClass = type instanceof PsiClassType ? ((PsiClassType)type).resolve() : null;
            try {
              final Pair<PsiVariable, PsiType> parameter = patternQualifier.getUserData(PARAMETER);
              if (parameter != null) {
                // A parameter-valued qualifier matched against an implicit this:
                // record "this" (of the most specific applicable class) as the value.
                final PsiClass thisClass = RefactoringChangeUtil.getThisClass(parameter.first);
                if (contextClass != null && InheritanceUtil.isInheritorOrSelf(thisClass, contextClass, true)) {
                  contextClass = thisClass;
                }
                final PsiClass thisCandidate = RefactoringChangeUtil.getThisClass(candidate);
                if (thisCandidate != null && InheritanceUtil.isInheritorOrSelf(thisCandidate, contextClass, true)) {
                  contextClass = thisCandidate;
                }
                return contextClass != null &&
                       match.putParameter(parameter, RefactoringChangeUtil
                         .createThisExpression(patternQualifier.getManager(), contextClass));
              }
              else if (patternQualifier instanceof PsiReferenceExpression) {
                final PsiElement resolved = ((PsiReferenceExpression)patternQualifier).resolve();
                if (resolved instanceof PsiClass) {
                  final PsiClass classContext = PsiTreeUtil.getContextOfType(candidate, PsiClass.class);
                  if (classContext != null && InheritanceUtil.isInheritorOrSelf(classContext, (PsiClass)resolved, true)) {
                    return true;
                  }
                }
              }
              return false;
            }
            catch (IncorrectOperationException e) {
              LOG.error(e);
            }
          }
        }
        else {
          if (patternQualifier instanceof PsiThisExpression && candidateQualifier instanceof PsiThisExpression) {
            // this.x vs this.x: both explicit this-qualifiers must denote the same class.
            final PsiJavaCodeReferenceElement thisPatternQualifier = ((PsiThisExpression)patternQualifier).getQualifier();
            final PsiElement patternContextClass = thisPatternQualifier == null
                                                   ? PsiTreeUtil.getContextOfType(patternQualifier, PsiClass.class)
                                                   : thisPatternQualifier.resolve();
            final PsiJavaCodeReferenceElement thisCandidateQualifier = ((PsiThisExpression)candidateQualifier).getQualifier();
            final PsiElement candidateContextClass = thisCandidateQualifier == null
                                                     ? PsiTreeUtil.getContextOfType(candidateQualifier, PsiClass.class)
                                                     : thisCandidateQualifier.resolve();
            return patternContextClass == candidateContextClass;
          }
        }
      }
    }
    else if (pattern instanceof PsiThisExpression) {
      final PsiJavaCodeReferenceElement qualifier = ((PsiThisExpression)pattern).getQualifier();
      final PsiElement contextClass = qualifier == null ? PsiTreeUtil.getContextOfType(pattern, PsiClass.class)
                                                        : qualifier.resolve();
      if (candidate instanceof PsiReferenceExpression) {
        final PsiElement parent = candidate.getParent();
        return parent instanceof PsiReferenceExpression &&
               contextClass instanceof PsiClass &&
               match.registerInstanceExpression(((PsiReferenceExpression)parent).getQualifierExpression(),
                                                (PsiClass)contextClass);
      }
      else if (candidate instanceof PsiThisExpression) {
        final PsiJavaCodeReferenceElement candidateQualifier = ((PsiThisExpression)candidate).getQualifier();
        final PsiElement candidateContextClass = candidateQualifier == null
                                                 ? PsiTreeUtil.getContextOfType(candidate, PsiClass.class)
                                                 : candidateQualifier.resolve();
        return contextClass == candidateContextClass;
      }
    }
    else if (pattern instanceof PsiSuperExpression) {
      final PsiJavaCodeReferenceElement qualifier = ((PsiSuperExpression)pattern).getQualifier();
      final PsiElement contextClass = qualifier == null ? PsiTreeUtil.getContextOfType(pattern, PsiClass.class)
                                                        : qualifier.resolve();
      if (candidate instanceof PsiSuperExpression) {
        final PsiJavaCodeReferenceElement candidateQualifier = ((PsiSuperExpression)candidate).getQualifier();
        return contextClass == (candidateQualifier != null ? candidateQualifier.resolve()
                                                           : PsiTreeUtil.getContextOfType(candidate, PsiClass.class));
      }
    }
    else if (pattern instanceof PsiModifierList) {
      return candidate instanceof PsiModifierList &&
             matchModifierList((PsiModifierList)pattern, (PsiModifierList)candidate);
    }

    // Structural recursion over the filtered child lists.
    PsiElement[] children1 = getFilteredChildren(pattern);
    PsiElement[] children2 = getFilteredChildren(candidate);
    if (children1.length != children2.length) return false;

    for (int i = 0; i < children1.length; i++) {
      PsiElement child1 = children1[i];
      PsiElement child2 = children2[i];
      if (!matchPattern(child1, child2, candidates, match)) {
        // Last resort: try folding the mismatching sub-expression into a new parameter.
        matchedExtractablePart = matchExtractableExpression(child1, child2, candidates, match, true);
        return matchedExtractablePart != null && matchedExtractablePart;
      }
    }

    if (children1.length == 0) {
      // Leaves: variable name identifiers establish a declaration correspondence; any
      // other leaf must match textually.
      if (pattern.getParent() instanceof PsiVariable &&
          ((PsiVariable)pattern.getParent()).getNameIdentifier() == pattern) {
        return match.putDeclarationCorrespondence(pattern.getParent(), candidate.getParent());
      }
      if (!pattern.textMatches(candidate)) return false;
    }

    return true;
  }

  // Returns TRUE/FALSE when the pair was handled as an extractable (foldable) expression,
  // or null when ordinary structural matching should proceed.
  @Nullable
  private Boolean matchExtractableExpression(PsiElement pattern, PsiElement candidate,
                                             List<PsiElement> candidates, Match match, boolean withFolding) {
    if (!(pattern instanceof PsiExpression) || !(candidate instanceof PsiExpression) ||
        withFolding && !myWithExtractedParameters) {
      return null;
    }
    if (myPattern.length == 1 && myPattern[0] == pattern ||
        candidates.size() == 1 && candidates.get(0) == candidate) {
      return null;
    }
    ComplexityHolder patternComplexity = null;
    if (withFolding) {
      if (myPatternComplexityHolder == null) {
        myPatternComplexityHolder = new ComplexityHolder(myPatternAsList);
      }
      patternComplexity =
myPatternComplexityHolder; } ExtractableExpressionPart patternPart = ExtractableExpressionPart.match((PsiExpression)pattern, myPatternAsList, patternComplexity); if (patternPart == null) { return null; } ComplexityHolder candidatesComplexity = null; if (withFolding) { if (myCandidateComplexityHolder == null || myCandidateComplexityHolder.getScope() != candidates) { myCandidateComplexityHolder = new ComplexityHolder(candidates); } candidatesComplexity = myCandidateComplexityHolder; } ExtractableExpressionPart candidatePart = ExtractableExpressionPart.match((PsiExpression)candidate, candidates, candidatesComplexity); if (candidatePart == null) { return null; } if (patternPart.myValue != null && patternPart.myValue.equals(candidatePart.myValue)) { return true; } if (patternPart.myVariable == null || candidatePart.myVariable == null) { return myWithExtractedParameters && match.putExtractedParameter(patternPart, candidatePart); } return null; } private boolean matchExtractableVariable(PsiElement pattern, PsiElement candidate, Match match) { if (!myWithExtractedParameters || !(pattern instanceof PsiReferenceExpression) || !(candidate instanceof PsiReferenceExpression)) { return false; } ExtractableExpressionPart part1 = ExtractableExpressionPart.matchVariable((PsiReferenceExpression)pattern, null); if (part1 == null || part1.myVariable == null) { return false; } ExtractableExpressionPart part2 = ExtractableExpressionPart.matchVariable((PsiReferenceExpression)candidate, null); if (part2 == null || part2.myVariable == null) { return false; } return match.putExtractedParameter(part1, part2); } private static boolean matchModifierList(PsiModifierList modifierList1, PsiModifierList modifierList2) { if (!(modifierList1.getParent() instanceof PsiLocalVariable)) { // local variables can only have a final modifier, and are considered equivalent with or without it. 
for (String modifier : PsiModifier.MODIFIERS) { if (modifierList1.hasModifierProperty(modifier)) { if (!modifierList2.hasModifierProperty(modifier)) { return false; } } else if (modifierList2.hasModifierProperty(modifier)) { return false; } } } return AnnotationUtil.equal(modifierList1.getAnnotations(), modifierList2.getAnnotations()); } private static boolean checkParameterModification(PsiExpression expression, final IElementType sign, PsiExpression candidate) { expression = PsiUtil.skipParenthesizedExprDown(expression); candidate = PsiUtil.skipParenthesizedExprDown(candidate); if (expression instanceof PsiReferenceExpression && ((PsiReferenceExpression)expression).resolve() instanceof PsiParameter && (sign.equals(JavaTokenType.MINUSMINUS)|| sign.equals(JavaTokenType.PLUSPLUS))) { if (candidate instanceof PsiReferenceExpression && ((PsiReferenceExpression)candidate).resolve() instanceof PsiParameter) { return false; } return true; } return false; } private static void traverseParameter(PsiElement pattern, PsiElement candidate, Match match) { if (pattern == null || candidate == null) return; if (pattern.getUserData(PARAMETER) != null) { final Pair<PsiVariable, PsiType> parameter = pattern.getUserData(PARAMETER); match.putParameter(parameter, candidate); return; } PsiElement[] children1 = getFilteredChildren(pattern); PsiElement[] children2 = getFilteredChildren(candidate); if (children1.length != children2.length) return; for (int i = 0; i < children1.length; i++) { PsiElement child1 = children1[i]; PsiElement child2 = children2[i]; traverseParameter(child1, child2, match); } } private boolean matchReturnStatement(final PsiReturnStatement patternReturnStatement, PsiElement candidate, List<PsiElement> candidates, Match match) { if (candidate instanceof PsiExpressionStatement) { final PsiExpression expression = ((PsiExpressionStatement)candidate).getExpression(); if (expression instanceof PsiAssignmentExpression) { final PsiExpression returnValue = 
patternReturnStatement.getReturnValue(); final PsiExpression rExpression = ((PsiAssignmentExpression)expression).getRExpression(); if (!matchPattern(returnValue, rExpression, candidates, match)) return false; final PsiExpression lExpression = ((PsiAssignmentExpression)expression).getLExpression(); return match.registerReturnValue(new ExpressionReturnValue(lExpression)); } else return false; } else if (candidate instanceof PsiDeclarationStatement) { final PsiElement[] declaredElements = ((PsiDeclarationStatement)candidate).getDeclaredElements(); if (declaredElements.length != 1) return false; if (!(declaredElements[0] instanceof PsiVariable)) return false; final PsiVariable variable = (PsiVariable)declaredElements[0]; if (!matchPattern(patternReturnStatement.getReturnValue(), variable.getInitializer(), candidates, match)) return false; return match.registerReturnValue(new VariableReturnValue(variable)); } else if (candidate instanceof PsiReturnStatement) { final PsiExpression returnValue = PsiUtil.skipParenthesizedExprDown(((PsiReturnStatement)candidate).getReturnValue()); if (myMultipleExitPoints) { return match.registerReturnValue(new ConditionalReturnStatementValue(returnValue)); } else { final PsiElement classOrLambda = PsiTreeUtil.getContextOfType(returnValue, PsiClass.class, PsiLambdaExpression.class); final PsiElement commonParent = PsiTreeUtil.findCommonParent(match.getMatchStart(), match.getMatchEnd()); if (classOrLambda == null || !PsiTreeUtil.isAncestor(commonParent, classOrLambda, false)) { if (returnValue != null && !match.registerReturnValue(ReturnStatementReturnValue.INSTANCE)) return false; //do not register return value for return; statement } return matchPattern(PsiUtil.skipParenthesizedExprDown(patternReturnStatement.getReturnValue()), returnValue, candidates, match); } } else return false; } private static boolean equivalentResolve(final PsiElement resolveResult1, final PsiElement resolveResult2, PsiElement qualifier2) { if 
(Comparing.equal(resolveResult1, resolveResult2)) return true; if (resolveResult1 instanceof PsiMethod && resolveResult2 instanceof PsiMethod) { final PsiMethod method1 = (PsiMethod)resolveResult1; final PsiMethod method2 = (PsiMethod)resolveResult2; if (method1.hasModifierProperty(PsiModifier.STATIC)) return false; // static methods don't inherit if (ArrayUtil.find(method1.findSuperMethods(), method2) >= 0) return true; if (ArrayUtil.find(method2.findSuperMethods(), method1) >= 0) return true; if (method1.getName().equals(method2.getName())) { PsiClass class2 = method2.getContainingClass(); if (qualifier2 instanceof PsiReferenceExpression) { final PsiType type = ((PsiReferenceExpression)qualifier2).getType(); if (type instanceof PsiClassType){ final PsiClass resolvedClass = PsiUtil.resolveClassInType(type); if (!(resolvedClass instanceof PsiTypeParameter)) { class2 = resolvedClass; } } } if (class2 != null && PsiUtil.isAccessible(method1, class2, null)) { final PsiMethod[] methods = class2.getAllMethods(); if (ArrayUtil.find(methods, method1) != -1) return true; } } return false; } else { return false; } } static boolean isUnder(@Nullable PsiElement element, @NotNull List<PsiElement> parents) { if (element == null) return false; for (final PsiElement parent : parents) { if (PsiTreeUtil.isAncestor(parent, element, false)) return true; } return false; } @NotNull public static PsiElement[] getFilteredChildren(PsiElement element1) { PsiElement[] children1 = element1.getChildren(); ArrayList<PsiElement> array = new ArrayList<>(); for (PsiElement child : children1) { if (!(child instanceof PsiWhiteSpace) && !(child instanceof PsiComment) && !(child instanceof PsiEmptyStatement)) { if (child instanceof PsiBlockStatement) { child = ((PsiBlockStatement)child).getCodeBlock(); } if (child instanceof PsiCodeBlock) { final PsiStatement[] statements = ((PsiCodeBlock)child).getStatements(); for (PsiStatement statement : statements) { if (statement instanceof PsiBlockStatement) { 
Collections.addAll(array, getFilteredChildren(statement)); } else if (!(statement instanceof PsiEmptyStatement)) { array.add(statement); } } continue; } else if (child instanceof PsiParenthesizedExpression) { array.add(PsiUtil.skipParenthesizedExprDown((PsiParenthesizedExpression)child)); continue; } array.add(child); } } return PsiUtilCore.toPsiElementArray(array); } }
/**
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.sun.facelets.mock;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.security.Principal;
import java.text.DateFormat;
import java.text.ParseException;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.Vector;

import javax.servlet.RequestDispatcher;
import javax.servlet.ServletContext;
import javax.servlet.ServletInputStream;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;

/**
 * Minimal in-memory {@link HttpServletRequest} implementation for tests.
 * Query-string parameters are parsed eagerly from the request URI; most
 * session-id related methods are intentionally unsupported.
 *
 * @author Jacob Hookom
 * @version $Id: MockHttpServletRequest.java,v 1.3 2008/07/13 19:01:48 rlubke Exp $
 */
public class MockHttpServletRequest implements HttpServletRequest {

    private final ServletContext servletContext;

    private final URI uri;

    private final String method;

    private Cookie[] cookies = new Cookie[0];

    // Header values may be stored as a String or as a List of Strings.
    private final Hashtable headers = new Hashtable();

    private String remoteUser;

    private String servletPath;

    private HttpSession session;

    private final Hashtable attributes = new Hashtable();

    private final Properties param = new Properties();

    private String characterEncoding = "ISO-8859-1";

    private String contentType = "text/html";

    private int contentLength = 0;

    private String protocol = "HTTP/1.1";

    private String localName = "localhost";

    private int localPort = 80;

    private String remoteAddr = "127.0.0.1";

    private String remoteHost = "localhost";

    private Locale locale = Locale.getDefault();

    private Vector locales = new Vector(Arrays.asList(Locale
            .getAvailableLocales()));

    private boolean secure = false;

    private int remotePort = 1024;

    private String localAddr = "127.0.0.1";

    private ServletInputStream inputStream = new MockServletInputStream();

    public MockHttpServletRequest(ServletContext servletContext, URI uri) {
        this(servletContext, "GET", uri);
    }

    public MockHttpServletRequest(ServletContext servletContext, String uri) {
        this(servletContext, "GET", uri);
    }

    public MockHttpServletRequest(ServletContext servletContext, String method,
            String uri) {
        this(servletContext, method, URI.create(uri));
    }

    /**
     * Creates a request for the given method and URI, eagerly parsing any
     * query string into the parameter table.
     */
    public MockHttpServletRequest(ServletContext servletContext, String method,
            URI uri) {
        this.servletContext = servletContext;
        this.uri = uri;
        this.method = method;
        String q = this.uri.getRawQuery();
        if (q != null) {
            // FIX: the previous implementation split on both '&' and '=' and
            // consumed tokens two at a time, which threw
            // ArrayIndexOutOfBoundsException for value-less parameters such as
            // "?a=1&flag". Split on '&' only and locate the first '=' per pair;
            // a pair without '=' maps to the empty string.
            String[] pairs = q.split("&");
            for (int i = 0; i < pairs.length; i++) {
                String pair = pairs[i];
                if (pair.length() == 0) {
                    continue;
                }
                int eq = pair.indexOf('=');
                if (eq >= 0) {
                    this.param.put(pair.substring(0, eq), pair.substring(eq + 1));
                } else {
                    this.param.put(pair, "");
                }
            }
        }
    }

    public String getAuthType() {
        return BASIC_AUTH;
    }

    public Cookie[] getCookies() {
        return this.cookies;
    }

    public long getDateHeader(String name) {
        // NOTE(review): real containers parse RFC 1123 dates; this mock keeps
        // the locale-dependent FULL format used by the original tests.
        String hdr = this.getHeader(name);
        if (hdr != null) {
            try {
                return DateFormat.getDateInstance(DateFormat.FULL).parse(hdr)
                        .getTime();
            } catch (ParseException e) {
                throw new IllegalArgumentException("Header " + name + ": "
                        + hdr);
            }
        }
        return -1;
    }

    public String getHeader(String name) {
        Object obj = this.headers.get(name);
        if (obj instanceof List) {
            return ((List) obj).get(0).toString();
        } else if (obj instanceof String) {
            return (String) obj;
        }
        return null;
    }

    public Enumeration getHeaders(String name) {
        Object obj = this.headers.get(name);
        if (obj instanceof List) {
            // FIX: previously only Vector-valued headers were enumerated while
            // getHeader() accepted any List — now both accept any List.
            return Collections.enumeration((List) obj);
        } else if (obj instanceof String) {
            Vector v = new Vector();
            v.add(obj);
            return v.elements();
        }
        return null;
    }

    public Enumeration getHeaderNames() {
        return this.headers.keys();
    }

    public int getIntHeader(String name) {
        String hdr = this.getHeader(name);
        if (hdr != null) {
            try {
                return Integer.parseInt(hdr);
            } catch (Exception e) {
                throw new IllegalArgumentException("Header " + name + ": "
                        + hdr);
            }
        }
        return -1;
    }

    public String getMethod() {
        return this.method;
    }

    public String getPathInfo() {
        return this.uri.getPath();
    }

    public String getPathTranslated() {
        return this.servletContext.getRealPath(this.uri.getPath());
    }

    public String getContextPath() {
        return this.uri.getPath();
    }

    public String getQueryString() {
        return this.uri.getQuery();
    }

    public String getRemoteUser() {
        return this.remoteUser;
    }

    public boolean isUserInRole(String role) {
        throw new UnsupportedOperationException();
    }

    public Principal getUserPrincipal() {
        throw new UnsupportedOperationException();
    }

    public String getRequestedSessionId() {
        return this.getParameter("jsessionid");
    }

    public String getRequestURI() {
        return this.uri.getPath();
    }

    public StringBuffer getRequestURL() {
        return new StringBuffer(this.uri.toString());
    }

    public String getServletPath() {
        return this.servletPath;
    }

    /** Lazily creates the session when {@code create} is {@code true}. */
    public HttpSession getSession(boolean create) {
        if (this.session == null && create) {
            this.session = new MockHttpSession(this.servletContext);
        }
        return this.session;
    }

    public HttpSession getSession() {
        return this.getSession(true);
    }

    public boolean isRequestedSessionIdValid() {
        throw new UnsupportedOperationException();
    }

    public boolean isRequestedSessionIdFromCookie() {
        throw new UnsupportedOperationException();
    }

    public boolean isRequestedSessionIdFromURL() {
        throw new UnsupportedOperationException();
    }

    public boolean isRequestedSessionIdFromUrl() {
        throw new UnsupportedOperationException();
    }

    public Object getAttribute(String name) {
        return this.attributes.get(name);
    }

    public Enumeration getAttributeNames() {
        return this.attributes.keys();
    }

    public String getCharacterEncoding() {
        return this.characterEncoding;
    }

    public void setCharacterEncoding(String characterEncoding)
            throws UnsupportedEncodingException {
        this.characterEncoding = characterEncoding;
    }

    public int getContentLength() {
        return this.contentLength;
    }

    public String getContentType() {
        return this.contentType;
    }

    public ServletInputStream getInputStream() throws IOException {
        return this.inputStream;
    }

    public String getParameter(String name) {
        return this.param.getProperty(name);
    }

    public Enumeration getParameterNames() {
        return this.param.keys();
    }

    public String[] getParameterValues(String name) {
        // Multiple values are stored comma-separated in a single property.
        String p = this.param.getProperty(name);
        if (p != null) {
            return p.split(",");
        }
        return null;
    }

    public void setParameter(String name, String value) {
        this.param.put(name, value);
    }

    public Map getParameterMap() {
        return Collections.unmodifiableMap(this.param);
    }

    public String getProtocol() {
        return this.protocol;
    }

    public String getScheme() {
        return this.uri.getScheme();
    }

    public String getServerName() {
        return this.localName;
    }

    public int getServerPort() {
        return this.localPort;
    }

    public BufferedReader getReader() throws IOException {
        if (this.inputStream != null) {
            Reader sourceReader = (this.characterEncoding != null) ? new InputStreamReader(
                    this.inputStream, this.characterEncoding)
                    : new InputStreamReader(this.inputStream);
            return new BufferedReader(sourceReader);
        } else {
            return null;
        }
    }

    public String getRemoteAddr() {
        return this.remoteAddr;
    }

    public String getRemoteHost() {
        return this.remoteHost;
    }

    public void setAttribute(String name, Object value) {
        this.attributes.put(name, value);
    }

    public void removeAttribute(String name) {
        this.attributes.remove(name);
    }

    public Locale getLocale() {
        return this.locale;
    }

    public Enumeration getLocales() {
        return this.locales.elements();
    }

    public boolean isSecure() {
        return this.secure;
    }

    public RequestDispatcher getRequestDispatcher(String path) {
        return this.servletContext.getRequestDispatcher(path);
    }

    public String getRealPath(String path) {
        return this.servletContext.getRealPath(path);
    }

    public int getRemotePort() {
        return this.remotePort;
    }

    public String getLocalName() {
        return this.localName;
    }

    public String getLocalAddr() {
        return this.localAddr;
    }

    public int getLocalPort() {
        return this.localPort;
    }
}
package gr.forth.ics.graph;

import junit.framework.*;
import gr.forth.ics.graph.algo.Generators;
import gr.forth.ics.graph.algo.transitivity.SuccessorSetFactory;
import gr.forth.ics.graph.algo.transitivity.Transitivity;
import gr.forth.ics.graph.path.Path;
import java.util.List;
import java.util.Set;
import randomunit.*;
import static gr.forth.ics.graph.Graphs.printCompact;

/**
 * Tests for {@code Graphs} utility operations and {@code GraphChecker} predicates,
 * combining fixed JUnit cases with a randomized stress test (via {@code randomunit})
 * that repeatedly mutates two graphs and verifies the live union/intersection/
 * subtraction/xor views stay consistent.
 */
public class GraphsTest extends RandomizedTestCase {
    public GraphsTest(String testName) {
        // 1000 randomized steps per run; log the last 4 steps on failure.
        super(testName, 1000, new SimpleLogStrategy(4));
    }

    public static Test suite() {
        TestSuite suite = new TestSuite(GraphsTest.class);
        return suite;
    }

    /** A path is a tree; adding a cycle-closing edge makes it a non-tree. */
    public void testIsTree() {
        Graph g = new PrimaryGraph();
        assertTrue(GraphChecker.isTree(g));
        Node n1 = g.newNode("1");
        Node n2 = g.newNode("2");
        Node n3 = g.newNode("3");
        g.newEdge(n1, n2);
        g.newEdge(n2, n3);
        assertTrue(GraphChecker.isTree(g));
        g.newEdge(n3, n1);
        assertFalse(GraphChecker.isTree(g));
    }

    /** Two disjoint trees form a forest, but not a single tree. */
    public void testIsForest() {
        Graph g = new PrimaryGraph();
        assertTrue(GraphChecker.isTree(g));
        Node n1 = g.newNode("1");
        Node n2 = g.newNode("2");
        Node n3 = g.newNode("3");
        g.newEdge(n1, n2);
        g.newEdge(n2, n3);
        g.newEdge(g.newNode(), g.newNode());
        assertTrue(GraphChecker.isForest(g));
        assertFalse(GraphChecker.isTree(g));
    }

    public void testIsBiconnected() {
        Graph g = new PrimaryGraph();
        assertTrue(GraphChecker.isBiconnected(g));
        Node n1 = g.newNode();
        assertTrue(GraphChecker.isBiconnected(g));
        Node n2 = g.newNode();
        // Two isolated nodes are disconnected, hence not biconnected.
        assertFalse(GraphChecker.isBiconnected(g));
        g.newEdge(n1, n2);
        assertTrue(GraphChecker.isBiconnected(g));
        g.newNode();
        assertFalse(GraphChecker.isBiconnected(g));
    }

    /**
     * Graph equality requires identical node AND edge sets; secondary graphs can
     * "adopt" the same node/edge instances to become equal.
     */
    public void testEquality() {
        SecondaryGraph g1 = new SecondaryGraph();
        SecondaryGraph g2 = new SecondaryGraph();
        assertTrue(Graphs.equalGraphs(g1, g2));
        Node n1 = g1.newNode();
        Edge e1 = g1.newEdge(n1, n1);
        assertFalse(Graphs.equalGraphs(g1, g2));
        g2.adoptNode(n1);
        assertFalse(Graphs.equalGraphs(g1, g2));
        g2.adoptEdge(e1);
        assertTrue(Graphs.equalGraphs(g1, g2));
        g1.removeAllEdges();
        assertFalse(Graphs.equalGraphs(g1, g2));
        g2.removeAllEdges();
        assertTrue(Graphs.equalGraphs(g1, g2));
        g1.removeAllNodes();
        assertFalse(Graphs.equalGraphs(g1, g2));
        g2.removeAllNodes();
        assertTrue(Graphs.equalGraphs(g1, g2));
    }

    /** The union view reflects additions/removals in either underlying graph. */
    public void testBasicUnion() {
        SecondaryGraph g1 = new SecondaryGraph();
        SecondaryGraph g2 = new SecondaryGraph();
        InspectableGraph union = Graphs.union(g1, g2);
        Node n1 = g1.newNode(1);
        assertTrue(union.containsNode(n1));
        Node n2 = g2.newNode(2);
        assertTrue(union.containsNode(n2));
        Edge e1 = g1.newEdge(n1, n1);
        assertTrue(union.containsEdge(e1));
        Edge e2 = g2.newEdge(n2, n2);
        assertTrue(union.containsEdge(e2));
        g2.adoptEdge(e1);
        assertEquals(2, union.nodeCount());
        assertEquals(2, union.edgeCount());
        // n1/e1 now also live in g2, so removing them from g1 alone changes nothing.
        g1.removeNode(n1);
        assertEquals(2, union.nodeCount());
        assertEquals(2, union.edgeCount());
        g2.removeNode(n1);
        assertEquals(1, union.nodeCount());
        assertEquals(1, union.edgeCount());
    }

    /** The intersection view contains only elements present in BOTH graphs. */
    public void testBasicIntersection() {
        SecondaryGraph g1 = new SecondaryGraph();
        SecondaryGraph g2 = new SecondaryGraph();
        InspectableGraph inter = Graphs.intersection(g1, g2);
        Node n1 = g1.newNode(1);
        assertTrue(!inter.containsNode(n1));
        Node n2 = g2.newNode(2);
        assertTrue(!inter.containsNode(n2));
        Edge e1 = g1.newEdge(n1, n1);
        assertTrue(!inter.containsEdge(e1));
        Edge e2 = g2.newEdge(n2, n2);
        assertTrue(!inter.containsEdge(e2));
        assertTrue(inter.isEmpty());
        g1.adoptNode(n2);
        assertTrue(inter.containsNode(n2));
        g2.adoptNode(n1);
        assertTrue(inter.containsNode(n1));
        g1.adoptEdge(e2);
        assertTrue(inter.containsEdge(e2));
        g2.adoptEdge(e1);
        assertTrue(inter.containsEdge(e1));
        assertEquals(2, inter.nodeCount());
        assertEquals(2, inter.edgeCount());
        g1.removeAllEdges();
        assertEquals(0, inter.edgeCount());
        g1.removeAllNodes();
        assertTrue(inter.isEmpty());
    }

    /** The subtraction view contains elements of g1 that are NOT in g2. */
    public void testBasicSubtraction() {
        SecondaryGraph g1 = new SecondaryGraph();
        SecondaryGraph g2 = new SecondaryGraph();
        InspectableGraph sub = Graphs.subtraction(g1, g2);
        Node n1 = g1.newNode(1);
        assertTrue(sub.containsNode(n1));
        g2.adoptNode(n1);
        assertTrue(!sub.containsNode(n1));
        Edge e1 = g1.newEdge(n1, n1);
        assertTrue(sub.containsEdge(e1));
        g2.adoptEdge(e1);
        assertTrue(!sub.containsEdge(e1));
        g2.removeEdge(e1);
        assertTrue(sub.containsEdge(e1));
        g1.removeEdge(e1);
        assertTrue(!sub.containsEdge(e1));
        g1.removeAllNodes();
        assertTrue(sub.isEmpty());
    }

    /** The xor view contains elements present in exactly one of the two graphs. */
    public void testBasicXor() {
        SecondaryGraph g1 = new SecondaryGraph();
        Node[] n = g1.newNodes(1, 2, 3);
        SecondaryGraph g2 = new SecondaryGraph();
        g2.adoptNode(n[0]);
        InspectableGraph xor = Graphs.xor(g1, g2);
        assertFalse(xor.containsNode(n[0]));
        assertTrue(xor.containsNode(n[1]));
        assertTrue(xor.containsNode(n[2]));
        Node n4 = g1.newNode(4);
        assertTrue(xor.containsNode(n4));
        g2.adoptNode(n4);
        assertFalse(xor.containsNode(n4));
        g1.removeNode(n4);
        assertTrue(xor.containsNode(n4));
        g1.adoptNode(n4);
        g2.removeNode(n4);
        Edge e1 = g1.newEdge(n4, n4);
        assertTrue(xor.containsEdge(e1));
        g2.adoptEdge(e1);
        assertFalse(xor.containsEdge(e1));
        g1.removeAllNodes();
        assertTrue(xor.containsNode(n4));
    }

    /**
     * Removing the shared endpoints from one side must make the edge reappear in
     * the xor view (it is then present in g1 only).
     */
    public void testXorReinsertsEdges() {
        Graph g1 = new PrimaryGraph();
        SecondaryGraph g2 = new SecondaryGraph();
        InspectableGraph xor = Graphs.xor(g1, g2);
        Node[] n = g1.newNodes("owned", "shared");
        Edge e = g1.newEdge(n[0], n[1]);
        g2.adoptEdge(e);
        assertTrue(xor.isEmpty());
        g2.removeNode(n[0]);
        g2.removeNode(n[1]);
        assertTrue(xor.containsEdge(e));
    }

    // ----- Randomized test: the four derived views are checked after every mutation.
    private final Graph g1 = new PrimaryGraph();
    private final Graph g2 = new SecondaryGraph();
    private final InspectableGraph union = Graphs.union(g1, g2);
    private final InspectableGraph intersection = Graphs.intersection(g1, g2);
    private final InspectableGraph subtraction = Graphs.subtraction(g1, g2);
    private final InspectableGraph xor = Graphs.xor(g1, g2);

    @Prob(1)
    void randomAddNode() {
        Graph graph = random.nextBoolean() ? g1 : g2;
        // Node value records the step at which it was created (aids failure logs).
        Node n = graph.newNode(getCurrentStep());
        checkConditions();
    }

    @Prob(1)
    void randomAddEdge() {
        Graph graph = random.nextBoolean() ? g1 : g2;
        precondition(graph.nodeCount() > 0);
        List<Node> nodes = graph.nodes().drainToList();
        graph.newEdge(pickRandom(nodes), pickRandom(nodes));
        checkConditions();
    }

    @Prob(1)
    void randomRemoveNode() {
        Graph graph = random.nextBoolean() ? g1 : g2;
        precondition(graph.nodeCount() > 0);
        List<Node> nodes = graph.nodes().drainToList();
        graph.removeNode(pickRandom(nodes));
        checkConditions();
    }

    @Prob(1)
    void randomRemoveEdge() {
        Graph graph = random.nextBoolean() ? g1 : g2;
        precondition(graph.edgeCount() > 0);
        List<Edge> edges = graph.edges().drainToList();
        graph.removeEdge(pickRandom(edges));
        checkConditions();
    }

    /** Re-validates all four derived views against the current graph contents. */
    private void checkConditions() {
        checkUnion();
        checkIntersection();
        checkSubtraction();
        checkXor();
    }

    private void checkUnion() {
        for (Node n : allNodes()) {
            invariant(union.containsNode(n));
        }
        for (Edge e : allEdges()) {
            invariant(union.containsEdge(e));
        }
    }

    private void checkIntersection() {
        for (Node n : allNodes()) {
            if (g1.containsNode(n) && g2.containsNode(n)) {
                invariant(intersection.containsNode(n));
            } else {
                invariant(!intersection.containsNode(n));
            }
        }
        for (Edge e : allEdges()) {
            if (g1.containsEdge(e) && g2.containsEdge(e)) {
                invariant(intersection.containsEdge(e));
            } else {
                invariant(!intersection.containsEdge(e));
            }
        }
    }

    private void checkSubtraction() {
        for (Node n : allNodes()) {
            if (g1.containsNode(n) && !g2.containsNode(n)) {
                invariant(subtraction.containsNode(n));
            } else {
                invariant(!subtraction.containsNode(n));
            }
        }
        for (Edge e : allEdges()) {
            if (g1.containsEdge(e) && !g2.containsEdge(e)) {
                invariant(subtraction.containsEdge(e));
            } else {
                invariant(!subtraction.containsEdge(e));
            }
        }
    }

    private void checkXor() {
        for (Node n : allNodes()) {
            if (g1.containsNode(n) ^ g2.containsNode(n)) {
                invariant(xor.containsNode(n));
            } else {
                invariant(!xor.containsNode(n));
            }
        }
        for (Edge e : allEdges()) {
            if (g1.containsEdge(e) ^ g2.containsEdge(e)) {
                invariant(xor.containsEdge(e));
            } else {
                invariant(!xor.containsEdge(e));
            }
        }
    }

    /** Snapshot of all nodes currently in either graph. */
    private Set<Node> allNodes() {
        Set<Node> nodes = g1.nodes().drainToSet();
        g2.nodes().drainTo(nodes);
        return nodes;
    }

    /** Snapshot of all edges currently in either graph. */
    private Set<Edge> allEdges() {
        Set<Edge> edges = g1.edges().drainToSet();
        g2.edges().drainTo(edges);
        return edges;
    }

    private <T> T pickRandom(List<T> list) {
        return list.get(random.nextInt(list.size()));
    }

    public void testIsConnected() {
        Graph g = new PrimaryGraph();
        GraphBuilder pg = new GraphBuilder(g);
        Path path = pg.newPath(g.newNodes(5));
        assertTrue(GraphChecker.isConnected(g));
        Edge e = path.getEdge(2);
        g.removeEdge(e);
        assertFalse(GraphChecker.isConnected(g));
        g.reinsertEdge(e);
        assertTrue(GraphChecker.isConnected(g));
    }

    /** Pins the exact textual output of {@code Graphs.printPretty}. */
    public void testPrettyPrint() {
        StringBuilder sb = new StringBuilder();
        Graph g = new PrimaryGraph();
        Node[] n = g.newNodes(1, 2);
        g.newEdge(n[0], n[1]);
        Graphs.printPretty(g, sb);
        assertEquals("Nodes (count = 2):\n" +
                "1\n" +
                "2\n" +
                "\n" +
                "Edges (count = 1):\n" +
                "{1->2}\n", sb.toString());
    }

    public void testMaxDegree() {
        Graph g = new PrimaryGraph();
        // Empty graph: max degree is defined as 0.
        assertEquals(0, Graphs.maxDegree(g));
        Node[] n = g.newNodes(3);
        g.newEdge(n[0], n[1]);
        g.newEdge(n[2], n[0]);
        assertEquals(1, Graphs.maxDegree(g, Direction.OUT));
        assertEquals(2, Graphs.maxDegree(g, Direction.EITHER));
        assertEquals(2, Graphs.maxDegree(g));
    }

    public void testMinDegree() {
        Graph g = new PrimaryGraph();
        // Empty graph: min degree is defined as Integer.MAX_VALUE.
        assertEquals(Integer.MAX_VALUE, Graphs.minDegree(g));
        Node[] n = g.newNodes(3);
        g.newEdge(n[0], n[1]);
        g.newEdge(n[2], n[0]);
        g.newEdge(n[2], n[2]);
        assertEquals(0, Graphs.minDegree(g, Direction.OUT));
        assertEquals(1, Graphs.minDegree(g, Direction.IN));
        assertEquals(1, Graphs.minDegree(g, Direction.EITHER));
        assertEquals(1, Graphs.minDegree(g));
    }

    /** Erdos-Gallai style check of which degree sequences admit a simple graph. */
    public void testIsSequenceGraphical() {
        assertFalse(GraphChecker.isSequenceGraphical(3, 2, 0));
        assertTrue(GraphChecker.isSequenceGraphical(3, 3, 2, 2, 2));
        assertTrue(GraphChecker.isSequenceGraphical(3, 2, 2, 1));
        assertTrue(GraphChecker.isSequenceGraphical(1, 2, 1));
        assertTrue(GraphChecker.isSequenceGraphical(3, 3, 2, 1, 1));
    }

    /**
     * collectNodes(n, OUT) on a random DAG must equal n's successors in the
     * materialized transitive closure of the same DAG.
     */
    public void testCollectNodes() {
        Graph g = new PrimaryGraph();
        Graphs.attachNodeNamer(g);
        Generators.createRandomDag(g, 7, 0.2);
        Graph closure = new SecondaryGraph(g);
        Transitivity.materialize(closure,
                Transitivity.acyclicClosure(closure, SuccessorSetFactory.hashSetBased()));
        for (Node n : g.nodes()) {
            Set<Node> nextNodes = Graphs.collectNodes(g, n, Direction.OUT);
            Set<Node> neighbors = closure.adjacentNodes(n, Direction.OUT).drainToSet();
            assertEquals(nextNodes, neighbors);
        }
    }

    public void testCollectNodesEmpty() {
        Graph g = new PrimaryGraph();
        Node n = g.newNode();
        assertTrue(Graphs.collectNodes(g, n, Direction.OUT).isEmpty());
    }

    /** A self-loop makes the node its own (single) reachable successor. */
    public void testCollectNodesSingleton() {
        Graph g = new PrimaryGraph();
        Node n = g.newNode();
        g.newEdge(n, n);
        assertTrue(Graphs.collectNodes(g, n, Direction.OUT).size() == 1);
    }
}
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chrome.browser.fullscreen;

import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ObjectAnimator;
import android.app.Activity;
import android.content.res.Resources;
import android.os.Build;
import android.os.Handler;
import android.os.Message;
import android.os.SystemClock;
import android.util.Property;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.view.Window;
import android.widget.FrameLayout;

import org.chromium.base.ActivityState;
import org.chromium.base.ApiCompatibilityUtils;
import org.chromium.base.ApplicationStatus;
import org.chromium.base.ApplicationStatus.ActivityStateListener;
import org.chromium.base.BaseChromiumApplication;
import org.chromium.base.BaseChromiumApplication.WindowFocusChangedListener;
import org.chromium.base.TraceEvent;
import org.chromium.base.VisibleForTesting;
import org.chromium.chrome.browser.Tab;
import org.chromium.chrome.browser.fullscreen.FullscreenHtmlApiHandler.FullscreenHtmlApiDelegate;
import org.chromium.chrome.browser.tabmodel.TabModelSelector;
import org.chromium.content.browser.ContentVideoView;
import org.chromium.content.browser.ContentViewCore;
import org.chromium.content_public.common.TopControlsState;

import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.HashSet;

/**
 * A class that manages control and content views to create the fullscreen mode.
 *
 * Offset convention (as used throughout this class): a control offset of 0 means the top
 * controls are fully visible, and an offset of -mControlContainerHeight means they are fully
 * hidden off the top of the screen.
 */
public class ChromeFullscreenManager extends FullscreenManager
        implements ActivityStateListener, WindowFocusChangedListener {
    // Minimum showtime of the toolbar (in ms).
    private static final long MINIMUM_SHOW_DURATION_MS = 3000;

    // Maximum length of the slide in/out animation of the toolbar (in ms).
    private static final long MAX_ANIMATION_DURATION_MS = 500;

    private static final int MSG_ID_CONTROLS_REQUEST_LAYOUT = 1;
    private static final int MSG_ID_HIDE_CONTROLS = 2;

    // Outstanding tokens handed out by showControlsPersistent(); while non-empty the controls
    // are pinned visible.
    private final HashSet<Integer> mPersistentControlTokens = new HashSet<Integer>();

    private final Activity mActivity;
    private final Window mWindow;
    private final Handler mHandler;
    private final int mControlContainerHeight;

    private final View mControlContainer;

    private long mMinShowNotificationMs = MINIMUM_SHOW_DURATION_MS;
    private long mMaxAnimationDurationMs = MAX_ANIMATION_DURATION_MS;

    // Float.NaN is used as a sentinel meaning "no offset set" for both the browser- and
    // renderer-driven offsets (see updateControlOffset() and hasBrowserControlOffsetOverride()).
    private float mBrowserControlOffset = Float.NaN;
    private float mRendererControlOffset = Float.NaN;
    private float mRendererContentOffset;
    private float mPreviousContentOffset = Float.NaN;
    private float mControlOffset;
    private float mPreviousControlOffset;
    private boolean mIsEnteringPersistentModeState;

    private boolean mInGesture;
    private boolean mContentViewScrolling;

    private int mPersistentControlsCurrentToken;
    private long mCurrentShowTime;
    private int mActivityShowToken = INVALID_TOKEN;

    private ObjectAnimator mControlAnimation;
    private boolean mCurrentAnimationIsShowing;

    private boolean mDisableBrowserOverride;

    private boolean mTopControlsPermanentlyHidden;
    private boolean mTopControlsAndroidViewHidden;
    private final boolean mSupportsBrowserOverride;

    private final ArrayList<FullscreenListener> mListeners = new ArrayList<FullscreenListener>();

    /**
     * A listener that gets notified of changes to the fullscreen state.
     */
    public interface FullscreenListener {
        /**
         * Called whenever the content's offset changes.
         * @param offset The new offset of the content from the top of the screen.
         */
        public void onContentOffsetChanged(float offset);

        /**
         * Called whenever the content's visible offset changes.
         * @param offset The new offset of the visible content from the top of the screen.
         */
        public void onVisibleContentOffsetChanged(float offset);

        /**
         * Called when a ContentVideoView is created/destroyed.
         * @param enabled Whether to enter or leave overlay video mode.
         */
        public void onToggleOverlayVideoMode(boolean enabled);
    }

    /**
     * Animatable property driving the browser-side control offset; written by the
     * ObjectAnimator created in animateIfNecessary().
     */
    private class ControlsOffsetProperty extends Property<ChromeFullscreenManager, Float> {
        public ControlsOffsetProperty() {
            super(Float.class, "controlsOffset");
        }

        @Override
        public Float get(ChromeFullscreenManager object) {
            return getControlOffset();
        }

        @Override
        public void set(ChromeFullscreenManager manager, Float offset) {
            // Animation frames are ignored entirely while the browser override is disabled
            // for tests.
            if (mDisableBrowserOverride) return;
            float browserOffset = offset.floatValue();
            if (Float.compare(mBrowserControlOffset, browserOffset) == 0) return;
            mBrowserControlOffset = browserOffset;
            manager.updateControlOffset();
            manager.updateVisuals();
        }
    }

    private final Runnable mUpdateVisibilityRunnable = new Runnable() {
        @Override
        public void run() {
            int visibility = shouldShowAndroidControls() ? View.VISIBLE : View.INVISIBLE;
            if (mControlContainer.getVisibility() == visibility) return;
            // requestLayout is required to trigger a new gatherTransparentRegion(), which
            // only occurs together with a layout and lets SurfaceFlinger trim overlays.
            // This may be almost equivalent to using View.GONE, but we still use View.INVISIBLE
            // since drawing caches etc. won't be destroyed, and the layout may be less expensive.
            mControlContainer.setVisibility(visibility);
            mControlContainer.requestLayout();
        }
    };

    // This static inner class holds a WeakReference to the outer object, to avoid triggering the
    // lint HandlerLeak warning.
    private static class FullscreenHandler extends Handler {
        private final WeakReference<ChromeFullscreenManager> mChromeFullscreenManager;

        public FullscreenHandler(ChromeFullscreenManager chromeFullscreenManager) {
            mChromeFullscreenManager = new WeakReference<ChromeFullscreenManager>(
                    chromeFullscreenManager);
        }

        @Override
        public void handleMessage(Message msg) {
            if (msg == null) return;
            // The manager may already have been garbage collected; drop the message if so.
            ChromeFullscreenManager chromeFullscreenManager = mChromeFullscreenManager.get();
            if (chromeFullscreenManager == null) return;

            switch (msg.what) {
                case MSG_ID_CONTROLS_REQUEST_LAYOUT:
                    chromeFullscreenManager.mControlContainer.requestLayout();
                    break;
                case MSG_ID_HIDE_CONTROLS:
                    chromeFullscreenManager.update(false);
                    break;
                default:
                    assert false : "Unexpected message for ID: " + msg.what;
                    break;
            }
        }
    }

    /**
     * Creates an instance of the fullscreen mode manager.
     * @param activity The activity that supports fullscreen.
     * @param controlContainer Container holding the controls (Toolbar).
     * @param modelSelector The model selector providing access to the current tab.
     * @param resControlContainerHeight The dimension resource ID for the control container height.
     * @param supportsBrowserOverride Whether we want to disable the token system used by the
     *        browser.
     */
    public ChromeFullscreenManager(Activity activity, View controlContainer,
            TabModelSelector modelSelector, int resControlContainerHeight,
            boolean supportsBrowserOverride) {
        super(activity.getWindow(), modelSelector);
        mActivity = activity;
        ApplicationStatus.registerStateListenerForActivity(this, activity);
        ((BaseChromiumApplication) activity.getApplication())
                .registerWindowFocusChangedListener(this);
        mWindow = activity.getWindow();
        mHandler = new FullscreenHandler(this);
        assert controlContainer != null;
        mControlContainer = controlContainer;
        Resources resources = mWindow.getContext().getResources();
        mControlContainerHeight = resources.getDimensionPixelSize(resControlContainerHeight);
        // Content starts fully below the controls (controls fully shown).
        mRendererContentOffset = mControlContainerHeight;
        mSupportsBrowserOverride = supportsBrowserOverride;
        updateControlOffset();
    }

    @Override
    public void onActivityStateChange(Activity activity, int newState) {
        if (newState == ActivityState.STOPPED) {
            // Exit fullscreen in onStop to ensure the system UI flags are set correctly when
            // showing again (on JB MR2+ builds, the omnibox would be covered by the
            // notification bar when this was done in onStart()).
            setPersistentFullscreenMode(false);
        } else if (newState == ActivityState.STARTED) {
            // Force the controls to be shown until we get an update from a Tab. This is a
            // workaround for when the renderer is killed but the Tab is not notified.
            mActivityShowToken = showControlsPersistentAndClearOldToken(mActivityShowToken);
        } else if (newState == ActivityState.DESTROYED) {
            ApplicationStatus.unregisterActivityStateListener(this);
            ((BaseChromiumApplication) mWindow.getContext().getApplicationContext())
                    .unregisterWindowFocusChangedListener(this);
        }
    }

    @Override
    public void onWindowFocusChanged(Activity activity, boolean hasFocus) {
        if (mActivity != activity) return;
        // Forward to the single-argument overload (presumably on FullscreenManager — confirm).
        onWindowFocusChanged(hasFocus);
        ContentVideoView videoView = ContentVideoView.getContentVideoView();
        if (videoView != null) {
            videoView.onFullscreenWindowFocused();
        }
    }

    @Override
    protected FullscreenHtmlApiDelegate createApiDelegate() {
        return new FullscreenHtmlApiDelegate() {
            @Override
            public View getNotificationAnchorView() {
                return mControlContainer;
            }

            @Override
            public int getNotificationOffsetY() {
                return (int) getControlOffset();
            }

            @Override
            public void onEnterFullscreen() {
                Tab tab = getActiveTab();
                if (getControlOffset() == -mControlContainerHeight) {
                    // The top controls are currently hidden.
                    getHtmlApiHandler().enterFullscreen(tab);
                } else {
                    // We should hide top controls first. The pending-enter flag is consumed by
                    // updateVisuals() once the controls reach the fully-hidden offset.
                    mIsEnteringPersistentModeState = true;
                    tab.updateFullscreenEnabledState();
                }
            }

            @Override
            public boolean cancelPendingEnterFullscreen() {
                boolean wasPending = mIsEnteringPersistentModeState;
                mIsEnteringPersistentModeState = false;
                return wasPending;
            }

            @Override
            public void onFullscreenExited(Tab tab) {
                // At this point, top controls are hidden. Show top controls only if it's
                // permitted.
                tab.updateTopControlsState(TopControlsState.SHOWN, true);
            }

            @Override
            public boolean shouldShowNotificationBubble() {
                return !isOverlayVideoMode();
            }
        };
    }

    /**
     * Disables the ability for the browser to override the renderer provided top controls
     * position for testing.
     */
    @VisibleForTesting
    public void disableBrowserOverrideForTest() {
        mDisableBrowserOverride = true;
        mPersistentControlTokens.clear();
        mHandler.removeMessages(MSG_ID_HIDE_CONTROLS);
        if (mControlAnimation != null) {
            mControlAnimation.cancel();
            mControlAnimation = null;
        }
        mBrowserControlOffset = Float.NaN;
        updateVisuals();
    }

    /**
     * Allows tests to override the animation durations for faster tests.
     * @param minShowDuration The minimum time the controls must be shown.
     * @param maxAnimationDuration The maximum animation time to show/hide the controls.
     */
    @VisibleForTesting
    public void setAnimationDurationsForTest(long minShowDuration, long maxAnimationDuration) {
        mMinShowNotificationMs = minShowDuration;
        mMaxAnimationDurationMs = maxAnimationDuration;
    }

    @Override
    public void showControlsTransient() {
        if (!mSupportsBrowserOverride) return;
        // No-op while persistent tokens pin the controls visible anyway.
        if (mPersistentControlTokens.isEmpty()) update(true);
    }

    @Override
    public int showControlsPersistent() {
        if (!mSupportsBrowserOverride) return INVALID_TOKEN;
        int token = mPersistentControlsCurrentToken++;
        mPersistentControlTokens.add(token);
        // Only the first outstanding token triggers the show; later ones just pin it.
        if (mPersistentControlTokens.size() == 1) update(true);
        return token;
    }

    @Override
    public int showControlsPersistentAndClearOldToken(int oldToken) {
        if (!mSupportsBrowserOverride) return INVALID_TOKEN;
        if (oldToken != INVALID_TOKEN) mPersistentControlTokens.remove(oldToken);
        return showControlsPersistent();
    }

    @Override
    public void hideControlsPersistent(int token) {
        if (!mSupportsBrowserOverride) return;
        // Hide only when the last outstanding token is released.
        if (mPersistentControlTokens.remove(token) && mPersistentControlTokens.isEmpty()) {
            update(false);
        }
    }

    /**
     * NOTE(review): "Permamently" is a typo for "Permanently", but this is a public method and
     * renaming it would break external callers; left as-is.
     * @param remove Whether or not to forcefully remove the toolbar.
     */
    public void setTopControlsPermamentlyHidden(boolean remove) {
        if (remove == mTopControlsPermanentlyHidden) return;
        mTopControlsPermanentlyHidden = remove;
        updateVisuals();
    }

    /**
     * @return Whether or not the toolbar is forcefully being removed.
     */
    public boolean areTopControlsPermanentlyHidden() {
        return mTopControlsPermanentlyHidden;
    }

    /**
     * @return Whether the top controls should be drawn as a texture.
     */
    public boolean drawControlsAsTexture() {
        return getControlOffset() > -mControlContainerHeight;
    }

    /**
     * @return The height of the top controls in pixels.
     */
    public int getTopControlsHeight() {
        return mControlContainerHeight;
    }

    @Override
    public float getContentOffset() {
        if (mTopControlsPermanentlyHidden) return 0;
        return rendererContentOffset();
    }

    /**
     * @return The offset of the controls from the top of the screen
     *         (0 = fully shown, -getTopControlsHeight() = fully hidden).
     */
    public float getControlOffset() {
        if (mTopControlsPermanentlyHidden) return -getTopControlsHeight();
        return mControlOffset;
    }

    // Recomputes mControlOffset as the max of the browser- and renderer-driven offsets, where a
    // NaN source means "not set" and contributes the fully-hidden offset instead.
    @SuppressWarnings("SelfEquality")
    private void updateControlOffset() {
        float offset = 0;
        // Inline Float.isNaN with "x != x":
        final boolean isNaNBrowserControlOffset = mBrowserControlOffset != mBrowserControlOffset;
        final float rendererControlOffset = rendererControlOffset();
        final boolean isNaNRendererControlOffset = rendererControlOffset != rendererControlOffset;
        if (!isNaNBrowserControlOffset || !isNaNRendererControlOffset) {
            offset = Math.max(
                    isNaNBrowserControlOffset ? -mControlContainerHeight : mBrowserControlOffset,
                    isNaNRendererControlOffset ? -mControlContainerHeight : rendererControlOffset);
        }
        mControlOffset = offset;
    }

    @Override
    public void setOverlayVideoMode(boolean enabled) {
        super.setOverlayVideoMode(enabled);
        for (int i = 0; i < mListeners.size(); i++) {
            mListeners.get(i).onToggleOverlayVideoMode(enabled);
        }
    }

    /**
     * @return Whether the browser has a control offset override.
     */
    @VisibleForTesting
    public boolean hasBrowserControlOffsetOverride() {
        return !Float.isNaN(mBrowserControlOffset) || mControlAnimation != null
                || !mPersistentControlTokens.isEmpty();
    }

    /**
     * Returns how tall the opaque portion of the control container is.
     */
    public float controlContainerHeight() {
        return mControlContainerHeight;
    }

    private float rendererContentOffset() {
        return mRendererContentOffset;
    }

    private float rendererControlOffset() {
        return mRendererControlOffset;
    }

    /**
     * @return The visible offset of the content from the top of the screen.
     */
    public float getVisibleContentOffset() {
        return mControlContainerHeight + getControlOffset();
    }

    /**
     * @param listener The {@link FullscreenListener} to be notified of fullscreen changes.
     */
    public void addListener(FullscreenListener listener) {
        if (!mListeners.contains(listener)) mListeners.add(listener);
    }

    /**
     * @param listener The {@link FullscreenListener} to no longer be notified of fullscreen
     *        changes.
     */
    public void removeListener(FullscreenListener listener) {
        mListeners.remove(listener);
    }

    /**
     * Updates the content view's viewport size to have it render the content correctly.
     *
     * @param viewCore The ContentViewCore to update.
     */
    public void updateContentViewViewportSize(ContentViewCore viewCore) {
        if (viewCore == null) return;
        if (mInGesture || mContentViewScrolling) return;

        // Update content viewport size only when the top controls are not animating, i.e. the
        // content offset rests at one of its two end positions.
        int contentOffset = (int) rendererContentOffset();
        if (contentOffset != 0 && contentOffset != mControlContainerHeight) return;
        viewCore.setTopControlsHeight(mControlContainerHeight, contentOffset > 0);
    }

    @Override
    public void updateContentViewChildrenState() {
        ContentViewCore contentViewCore = getActiveContentViewCore();
        if (contentViewCore == null) return;
        ViewGroup view = contentViewCore.getContainerView();

        float topViewsTranslation = (getControlOffset() + mControlContainerHeight);
        applyTranslationToTopChildViews(view, topViewsTranslation);
        applyMarginToFullChildViews(view, topViewsTranslation);
        updateContentViewViewportSize(contentViewCore);
    }

    /**
     * Utility routine for ensuring visibility updates are synchronized with
     * animation, preventing message loop stalls due to untimely invalidation.
     */
    private void scheduleVisibilityUpdate() {
        final int desiredVisibility = shouldShowAndroidControls() ? View.VISIBLE : View.INVISIBLE;
        if (mControlContainer.getVisibility() == desiredVisibility) return;
        mControlContainer.removeCallbacks(mUpdateVisibilityRunnable);
        ApiCompatibilityUtils.postOnAnimation(mControlContainer, mUpdateVisibilityRunnable);
    }

    // Pushes the current offsets out to the control container, the HTML-fullscreen handler, the
    // content view children, and all registered listeners. Central sink for every state change.
    private void updateVisuals() {
        TraceEvent.begin("FullscreenManager:updateVisuals");

        float offset = getControlOffset();
        if (Float.compare(mPreviousControlOffset, offset) != 0) {
            mPreviousControlOffset = offset;
            getHtmlApiHandler().updateBubblePosition();

            scheduleVisibilityUpdate();
            if (shouldShowAndroidControls()) mControlContainer.setTranslationY(getControlOffset());

            // In ICS, the toolbar can appear clipped when compositor content is not being drawn
            // beneath it (at the top of the page, during side swipe). Requesting a layout clears
            // up the issue (see crbug.com/172631).
            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
                if (!mHandler.hasMessages(MSG_ID_CONTROLS_REQUEST_LAYOUT)) {
                    mHandler.sendEmptyMessage(MSG_ID_CONTROLS_REQUEST_LAYOUT);
                }
            }
            for (int i = 0; i < mListeners.size(); i++) {
                mListeners.get(i).onVisibleContentOffsetChanged(getVisibleContentOffset());
            }
        }

        // A deferred HTML-fullscreen entry fires once the controls are fully hidden.
        final Tab tab = getActiveTab();
        if (tab != null && offset == -mControlContainerHeight && mIsEnteringPersistentModeState) {
            getHtmlApiHandler().enterFullscreen(tab);
            mIsEnteringPersistentModeState = false;
        }

        updateContentViewChildrenState();

        float contentOffset = getContentOffset();
        if (Float.compare(mPreviousContentOffset, contentOffset) != 0) {
            for (int i = 0; i < mListeners.size(); i++) {
                mListeners.get(i).onContentOffsetChanged(contentOffset);
            }
            mPreviousContentOffset = contentOffset;
        }

        TraceEvent.end("FullscreenManager:updateVisuals");
    }

    /**
     * @param hide Whether or not to force the top controls Android view to hide.
     *        If this is {@code false} the top controls Android view will show/hide based on
     *        position, if it is {@code true} the top controls Android view will always be hidden.
     */
    public void setHideTopControlsAndroidView(boolean hide) {
        if (mTopControlsAndroidViewHidden == hide) return;
        mTopControlsAndroidViewHidden = hide;
        scheduleVisibilityUpdate();
    }

    // The Android view is shown when the controls sit at offset 0, when any content child is
    // top-gravity anchored, or when persistent tokens pin the controls — unless force-hidden.
    private boolean shouldShowAndroidControls() {
        if (mTopControlsAndroidViewHidden) return false;

        boolean showControls = getControlOffset() == 0;
        ContentViewCore contentViewCore = getActiveContentViewCore();
        if (contentViewCore == null) return showControls;
        ViewGroup contentView = contentViewCore.getContainerView();

        for (int i = 0; i < contentView.getChildCount(); i++) {
            View child = contentView.getChildAt(i);
            if (!(child.getLayoutParams() instanceof FrameLayout.LayoutParams)) continue;

            FrameLayout.LayoutParams layoutParams =
                    (FrameLayout.LayoutParams) child.getLayoutParams();
            if (Gravity.TOP == (layoutParams.gravity & Gravity.FILL_VERTICAL)) {
                showControls = true;
                break;
            }
        }

        showControls |= !mPersistentControlTokens.isEmpty();

        return showControls;
    }

    // Keeps full-height (MATCH_PARENT) children below the controls by adjusting their top margin.
    private void applyMarginToFullChildViews(ViewGroup contentView, float margin) {
        for (int i = 0; i < contentView.getChildCount(); i++) {
            View child = contentView.getChildAt(i);
            if (!(child.getLayoutParams() instanceof FrameLayout.LayoutParams)) continue;
            FrameLayout.LayoutParams layoutParams =
                    (FrameLayout.LayoutParams) child.getLayoutParams();

            if (layoutParams.height == LayoutParams.MATCH_PARENT
                    && layoutParams.topMargin != (int) margin) {
                layoutParams.topMargin = (int) margin;
                child.requestLayout();
                TraceEvent.instant("FullscreenManager:child.requestLayout()");
            }
        }
    }

    // Translates top-gravity-anchored children so they track the control container.
    private void applyTranslationToTopChildViews(ViewGroup contentView, float translation) {
        for (int i = 0; i < contentView.getChildCount(); i++) {
            View child = contentView.getChildAt(i);
            if (!(child.getLayoutParams() instanceof FrameLayout.LayoutParams)) continue;

            FrameLayout.LayoutParams layoutParams =
                    (FrameLayout.LayoutParams) child.getLayoutParams();
            if (Gravity.TOP == (layoutParams.gravity & Gravity.FILL_VERTICAL)) {
                child.setTranslationY(translation);
                TraceEvent.instant("FullscreenManager:child.setTranslationY()");
            }
        }
    }

    private Tab getActiveTab() {
        Tab tab = getTabModelSelector().getCurrentTab();
        return tab;
    }

    private ContentViewCore getActiveContentViewCore() {
        Tab tab = getActiveTab();
        return tab != null ? tab.getContentViewCore() : null;
    }

    @Override
    public void setPositionsForTabToNonFullscreen() {
        Tab tab = getActiveTab();
        if (tab == null || tab.isShowingTopControlsEnabled()) {
            setPositionsForTab(0, mControlContainerHeight);
        } else {
            setPositionsForTab(-mControlContainerHeight, 0);
        }
    }

    @Override
    public void setPositionsForTab(float controlsOffset, float contentOffset) {
        // Once we get an update from a tab, clear the activity show token and allow the render
        // to control the positions of the top controls.
        if (mActivityShowToken != INVALID_TOKEN) {
            hideControlsPersistent(mActivityShowToken);
            mActivityShowToken = INVALID_TOKEN;
        }
        // Clamp the control offset to [-height, ...] and the content offset so it never exceeds
        // the bottom edge of the controls.
        float rendererControlOffset =
                Math.round(Math.max(controlsOffset, -mControlContainerHeight));
        float rendererContentOffset = Math.min(
                Math.round(contentOffset), rendererControlOffset + mControlContainerHeight);

        if (Float.compare(rendererControlOffset, mRendererControlOffset) == 0
                && Float.compare(rendererContentOffset, mRendererContentOffset) == 0) {
            return;
        }

        mRendererControlOffset = rendererControlOffset;
        mRendererContentOffset = rendererContentOffset;
        updateControlOffset();

        // While a browser animation is running, the animation's property setter drives
        // updateVisuals() instead.
        if (mControlAnimation == null) updateVisuals();
    }

    /**
     * @param e The dispatched motion event
     * @return Whether or not this motion event is in the top control container area and should be
     *         consumed.
     */
    public boolean onInterceptMotionEvent(MotionEvent e) {
        return e.getY() < getControlOffset() + mControlContainerHeight
                && !mTopControlsAndroidViewHidden;
    }

    /**
     * Notifies the fullscreen manager that a motion event has occurred.
     * @param e The dispatched motion event.
     */
    public void onMotionEvent(MotionEvent e) {
        int eventAction = e.getActionMasked();
        if (eventAction == MotionEvent.ACTION_DOWN
                || eventAction == MotionEvent.ACTION_POINTER_DOWN) {
            mInGesture = true;
            getHtmlApiHandler().hideNotificationBubble();
        } else if (eventAction == MotionEvent.ACTION_CANCEL
                || eventAction == MotionEvent.ACTION_UP) {
            mInGesture = false;
            updateVisuals();
        }
    }

    // Forces the controls shown (show == true) or hidden, honoring the minimum show duration by
    // delaying the hide via MSG_ID_HIDE_CONTROLS.
    private void update(boolean show) {
        // On forced show/hide, reset the flags that may suppress ContentView resize.
        // As this method is also called when tab is switched, this also cleanup the scrolling
        // flag set based on the previous ContentView's scrolling state.
        mInGesture = false;
        mContentViewScrolling = false;

        if (show) mCurrentShowTime = SystemClock.uptimeMillis();

        boolean postHideMessage = false;
        if (!show) {
            if (mControlAnimation != null && mCurrentAnimationIsShowing) {
                // A show animation is in flight; retry the hide once it has had its minimum
                // showtime rather than cancelling it.
                postHideMessage = true;
            } else {
                long timeDelta = SystemClock.uptimeMillis() - mCurrentShowTime;
                animateIfNecessary(false, Math.max(mMinShowNotificationMs - timeDelta, 0));
            }
        } else {
            animateIfNecessary(true, 0);
            // Auto-hide after the minimum show duration unless a persistent token pins it.
            if (mPersistentControlTokens.isEmpty()) postHideMessage = true;
        }

        mHandler.removeMessages(MSG_ID_HIDE_CONTROLS);
        if (postHideMessage) {
            long timeDelta = SystemClock.uptimeMillis() - mCurrentShowTime;
            mHandler.sendEmptyMessageDelayed(
                    MSG_ID_HIDE_CONTROLS, Math.max(mMinShowNotificationMs - timeDelta, 0));
        }
    }

    // Starts (or leaves running) an animation toward fully shown/hidden; duration scales with
    // the remaining travel distance so partial slides finish proportionally faster.
    private void animateIfNecessary(final boolean show, long startDelay) {
        if (mControlAnimation != null) {
            if (!mControlAnimation.isRunning() || mCurrentAnimationIsShowing != show) {
                mControlAnimation.cancel();
                mControlAnimation = null;
            } else {
                // An equivalent animation is already running; keep it.
                return;
            }
        }

        float destination = show ? 0 : -mControlContainerHeight;
        long duration = (long) (mMaxAnimationDurationMs
                * Math.abs((destination - getControlOffset()) / mControlContainerHeight));
        mControlAnimation = ObjectAnimator.ofFloat(this, new ControlsOffsetProperty(), destination);
        mControlAnimation.addListener(new AnimatorListenerAdapter() {
            private boolean mCanceled = false;

            @Override
            public void onAnimationCancel(Animator anim) {
                mCanceled = true;
            }

            @Override
            public void onAnimationEnd(Animator animation) {
                // A completed hide clears the browser override so the renderer regains control.
                if (!show && !mCanceled) mBrowserControlOffset = Float.NaN;
                mControlAnimation = null;
            }
        });
        mControlAnimation.setStartDelay(startDelay);
        mControlAnimation.setDuration(duration);
        mControlAnimation.start();
        mCurrentAnimationIsShowing = show;
    }

    @Override
    public void onContentViewScrollingStateChanged(boolean scrolling) {
        mContentViewScrolling = scrolling;
        if (!scrolling) updateVisuals();
    }
}
package com.reactnativenavigation.controllers; import android.content.Intent; import android.os.Bundle; import com.facebook.react.bridge.Callback; import com.facebook.react.bridge.Promise; import com.reactnativenavigation.NavigationApplication; import com.reactnativenavigation.params.ActivityParams; import com.reactnativenavigation.params.ContextualMenuParams; import com.reactnativenavigation.params.FabParams; import com.reactnativenavigation.params.LightBoxParams; import com.reactnativenavigation.params.ScreenParams; import com.reactnativenavigation.params.SlidingOverlayParams; import com.reactnativenavigation.params.SnackbarParams; import com.reactnativenavigation.params.TitleBarButtonParams; import com.reactnativenavigation.params.TitleBarLeftButtonParams; import com.reactnativenavigation.params.parsers.ActivityParamsParser; import com.reactnativenavigation.params.parsers.ScreenParamsParser; import com.reactnativenavigation.utils.OrientationHelper; import com.reactnativenavigation.views.SideMenu.Side; import java.util.List; public class NavigationCommandsHandler { private static final String ACTIVITY_PARAMS_BUNDLE = "ACTIVITY_PARAMS_BUNDLE"; static ActivityParams parseActivityParams(Intent intent) { return ActivityParamsParser.parse(intent.getBundleExtra(NavigationCommandsHandler.ACTIVITY_PARAMS_BUNDLE)); } /** * start a new activity with CLEAR_TASK | NEW_TASK * * @param params ActivityParams as bundle */ public static void startApp(Bundle params) { Intent intent = new Intent(NavigationApplication.instance, NavigationActivity.class); IntentDataHandler.onStartApp(intent); intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK | Intent.FLAG_ACTIVITY_NEW_TASK); intent.putExtra(ACTIVITY_PARAMS_BUNDLE, params); NavigationApplication.instance.startActivity(intent); } public static void push(Bundle screenParams) { final NavigationActivity currentActivity = NavigationActivity.currentActivity; if (currentActivity == null) { return; } final ScreenParams params = 
ScreenParamsParser.parse(screenParams); NavigationApplication.instance.runOnMainThread(new Runnable() { @Override public void run() { currentActivity.push(params); } }); } public static void pop(Bundle screenParams) { final NavigationActivity currentActivity = NavigationActivity.currentActivity; if (currentActivity == null) { return; } final ScreenParams params = ScreenParamsParser.parse(screenParams); NavigationApplication.instance.runOnMainThread(new Runnable() { @Override public void run() { currentActivity.pop(params); } }); } public static void popToRoot(Bundle screenParams) { final NavigationActivity currentActivity = NavigationActivity.currentActivity; if (currentActivity == null) { return; } final ScreenParams params = ScreenParamsParser.parse(screenParams); NavigationApplication.instance.runOnMainThread(new Runnable() { @Override public void run() { currentActivity.popToRoot(params); } }); } public static void newStack(Bundle screenParams) { final NavigationActivity currentActivity = NavigationActivity.currentActivity; if (currentActivity == null) { return; } final ScreenParams params = ScreenParamsParser.parse(screenParams); NavigationApplication.instance.runOnMainThread(new Runnable() { @Override public void run() { currentActivity.newStack(params); } }); } public static void setTopBarVisible(final String screenInstanceID, final boolean hidden, final boolean animated) { final NavigationActivity currentActivity = NavigationActivity.currentActivity; if (currentActivity == null) { return; } NavigationApplication.instance.runOnMainThread(new Runnable() { @Override public void run() { currentActivity.setTopBarVisible(screenInstanceID, hidden, animated); } }); } public static void setBottomTabsVisible(final boolean hidden, final boolean animated) { final NavigationActivity currentActivity = NavigationActivity.currentActivity; if (currentActivity == null) { return; } NavigationApplication.instance.runOnMainThread(new Runnable() { @Override public void run() { 
currentActivity.setBottomTabsVisible(hidden, animated); } }); } public static void setScreenTitleBarTitle(final String screenInstanceId, final String title) { final NavigationActivity currentActivity = NavigationActivity.currentActivity; if (currentActivity == null) { return; } NavigationApplication.instance.runOnMainThread(new Runnable() { @Override public void run() { currentActivity.setTitleBarTitle(screenInstanceId, title); } }); } public static void setScreenTitleBarSubtitle(final String screenInstanceId, final String subtitle) { final NavigationActivity currentActivity = NavigationActivity.currentActivity; if (currentActivity == null) { return; } NavigationApplication.instance.runOnMainThread(new Runnable() { @Override public void run() { currentActivity.setTitleBarSubtitle(screenInstanceId, subtitle); } }); } public static void showModal(final Bundle params) { final NavigationActivity currentActivity = NavigationActivity.currentActivity; if (currentActivity == null) { return; } NavigationApplication.instance.runOnMainThread(new Runnable() { @Override public void run() { currentActivity.showModal(ScreenParamsParser.parse(params)); } }); } public static void showLightBox(final LightBoxParams params) { final NavigationActivity currentActivity = NavigationActivity.currentActivity; if (currentActivity == null) { return; } NavigationApplication.instance.runOnMainThread(new Runnable() { @Override public void run() { currentActivity.showLightBox(params); } }); } public static void dismissLightBox() { final NavigationActivity currentActivity = NavigationActivity.currentActivity; if (currentActivity == null) { return; } NavigationApplication.instance.runOnMainThread(new Runnable() { @Override public void run() { currentActivity.dismissLightBox(); } }); } public static void setScreenTitleBarRightButtons(final String screenInstanceId, final String navigatorEventId, final List<TitleBarButtonParams> titleBarButtons) { final NavigationActivity currentActivity = 
NavigationActivity.currentActivity;
        if (currentActivity == null) {
            return;
        }
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                currentActivity.setTitleBarButtons(screenInstanceId, navigatorEventId, titleBarButtons);
            }
        });
    }

    /**
     * Replaces the left title-bar button of the screen identified by
     * {@code screenInstanceId}. Silently ignored when no activity is in the
     * foreground. The actual UI mutation is posted to the main thread.
     */
    public static void setScreenTitleBarLeftButtons(final String screenInstanceId,
            final String navigatorEventId, final TitleBarLeftButtonParams titleBarButtons) {
        final NavigationActivity activity = NavigationActivity.currentActivity;
        if (activity == null) {
            return;
        }
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                activity.setTitleBarLeftButton(screenInstanceId, navigatorEventId, titleBarButtons);
            }
        });
    }

    /** Installs a floating action button on the given screen (main thread). */
    public static void setScreenFab(final String screenInstanceId,
            final String navigatorEventId, final FabParams fab) {
        final NavigationActivity activity = NavigationActivity.currentActivity;
        if (activity == null) {
            return;
        }
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                activity.setScreenFab(screenInstanceId, navigatorEventId, fab);
            }
        });
    }

    /** Dismisses the top-most modal, if any. No-op without a foreground activity. */
    public static void dismissTopModal() {
        final NavigationActivity activity = NavigationActivity.currentActivity;
        if (activity == null) {
            return;
        }
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                activity.dismissTopModal();
            }
        });
    }

    /** Dismisses every open modal. No-op without a foreground activity. */
    public static void dismissAllModals() {
        final NavigationActivity activity = NavigationActivity.currentActivity;
        if (activity == null) {
            return;
        }
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                activity.dismissAllModals();
            }
        });
    }

    /** Toggles the side menu on the given side, optionally animated. */
    public static void toggleSideMenuVisible(final boolean animated, final Side side) {
        final NavigationActivity activity = NavigationActivity.currentActivity;
        if (activity == null) {
            return;
        }
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                activity.toggleSideMenuVisible(animated, side);
            }
        });
    }

    /** Explicitly shows or hides the side menu on the given side. */
    public static void setSideMenuVisible(final boolean animated, final boolean visible,
            final Side side) {
        final NavigationActivity activity = NavigationActivity.currentActivity;
        if (activity == null) {
            return;
        }
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                activity.setSideMenuVisible(animated, visible, side);
            }
        });
    }

    /** Selects a top tab of the given screen by positional index. */
    public static void selectTopTabByTabIndex(final String screenInstanceId, final int index) {
        final NavigationActivity activity = NavigationActivity.currentActivity;
        if (activity == null) {
            return;
        }
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                activity.selectTopTabByTabIndex(screenInstanceId, index);
            }
        });
    }

    /** Selects a top tab by the screen instance it hosts. */
    public static void selectTopTabByScreen(final String screenInstanceId) {
        final NavigationActivity activity = NavigationActivity.currentActivity;
        if (activity == null) {
            return;
        }
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                activity.selectTopTabByScreen(screenInstanceId);
            }
        });
    }

    /** Selects a bottom tab by positional index. */
    public static void selectBottomTabByTabIndex(final Integer index) {
        final NavigationActivity activity = NavigationActivity.currentActivity;
        if (activity == null) {
            return;
        }
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                activity.selectBottomTabByTabIndex(index);
            }
        });
    }

    /** Selects a bottom tab by its navigator id. */
    public static void selectBottomTabByNavigatorId(final String navigatorId) {
        final NavigationActivity activity = NavigationActivity.currentActivity;
        if (activity == null) {
            return;
        }
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                activity.selectBottomTabByNavigatorId(navigatorId);
            }
        });
    }

    /** Sets (or clears) the badge text on the bottom tab at {@code index}. */
    public static void setBottomTabBadgeByIndex(final Integer index, final String badge) {
        final NavigationActivity activity = NavigationActivity.currentActivity;
        if (activity == null) {
            return;
        }
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                activity.setBottomTabBadgeByIndex(index, badge);
            }
        });
    }

    /** Sets (or clears) the badge text on the bottom tab owning {@code navigatorId}. */
    public static void setBottomTabBadgeByNavigatorId(final String navigatorId,
            final String badge) {
        final NavigationActivity activity = NavigationActivity.currentActivity;
        if (activity == null) {
            return;
        }
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                activity.setBottomTabBadgeByNavigatorId(navigatorId, badge);
            }
        });
    }

    /** Shows a sliding overlay described by {@code params}. */
    public static void showSlidingOverlay(final SlidingOverlayParams params) {
        final NavigationActivity activity = NavigationActivity.currentActivity;
        if (activity == null) {
            return;
        }
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                activity.showSlidingOverlay(params);
            }
        });
    }

    /** Hides the currently visible sliding overlay, if any. */
    public static void hideSlidingOverlay() {
        final NavigationActivity activity = NavigationActivity.currentActivity;
        if (activity == null) {
            return;
        }
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                activity.hideSlidingOverlay();
            }
        });
    }

    /** Shows a snackbar described by {@code params}. */
    public static void showSnackbar(final SnackbarParams params) {
        final NavigationActivity activity = NavigationActivity.currentActivity;
        if (activity == null) {
            return;
        }
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                activity.showSnackbar(params);
            }
        });
    }

    /**
     * Shows a contextual menu on the given screen; {@code onButtonClicked} is
     * invoked by the activity when the user picks an item.
     */
    public static void showContextualMenu(final String screenInstanceId,
            final ContextualMenuParams params, final Callback onButtonClicked) {
        final NavigationActivity activity = NavigationActivity.currentActivity;
        if (activity == null) {
            return;
        }
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                activity.showContextualMenu(screenInstanceId, params, onButtonClicked);
            }
        });
    }

    /** Dismisses the contextual menu shown on the given screen, if any. */
    public static void dismissContextualMenu(final String screenInstanceId) {
        final NavigationActivity activity = NavigationActivity.currentActivity;
        if (activity == null) {
            return;
        }
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                activity.dismissContextualMenu(screenInstanceId);
            }
        });
    }

    /** Dismisses the currently visible snackbar, if any. */
    public static void dismissSnackbar() {
        final NavigationActivity activity = NavigationActivity.currentActivity;
        if (activity == null) {
            return;
        }
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                activity.dismissSnackbar();
            }
        });
    }

    /**
     * Resolves {@code promise} with the current device orientation.
     * NOTE(review): when no foreground activity exists this returns without
     * resolving or rejecting the promise, leaving the JS caller pending —
     * confirm that is intentional.
     */
    public static void getOrientation(Promise promise) {
        final NavigationActivity activity = NavigationActivity.currentActivity;
        if (activity == null) {
            return;
        }
        promise.resolve(OrientationHelper.getOrientation(activity));
    }
}
/**
 * Licensed to Cloudera, Inc. under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  Cloudera, Inc. licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.cloudera.sqoop;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import junit.framework.TestCase;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.StringUtils;

import com.cloudera.sqoop.manager.ConnManager;
import com.cloudera.sqoop.manager.HsqldbManager;
import com.cloudera.sqoop.manager.ManagerFactory;
import com.cloudera.sqoop.metastore.JobData;
import com.cloudera.sqoop.metastore.TestSavedJobs;
import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
import com.cloudera.sqoop.testutil.CommonArgs;
import com.cloudera.sqoop.tool.ImportTool;
import com.cloudera.sqoop.tool.JobTool;

/**
 * Test the incremental import functionality.
 *
 * These all make use of the auto-connect hsqldb-based metastore.
 * The metastore URL is configured to be in-memory, and drops all
 * state between individual tests.
 */
public class TestIncrementalImport extends TestCase {

  public static final Log LOG = LogFactory.getLog(
      TestIncrementalImport.class.getName());

  // What database do we read from (in-memory hsqldb; discarded with the JVM).
  public static final String SOURCE_DB_URL = "jdbc:hsqldb:mem:incremental";

  @Override
  public void setUp() throws Exception {
    // Delete db state between tests.
    TestSavedJobs.resetJobSchema();
    resetSourceDataSchema();
  }

  /** Drops all tables in the source database so each test starts clean. */
  public static void resetSourceDataSchema() throws SQLException {
    SqoopOptions options = new SqoopOptions();
    options.setConnectString(SOURCE_DB_URL);
    TestSavedJobs.resetSchema(options);
  }

  /** @return a Configuration pre-wired for the test metastore. */
  public static Configuration newConf() {
    return TestSavedJobs.newConf();
  }

  /**
   * Assert that a table has a specified number of rows.
   */
  private void assertRowCount(String table, int numRows)
      throws SQLException {
    SqoopOptions options = new SqoopOptions();
    options.setConnectString(SOURCE_DB_URL);
    HsqldbManager manager = new HsqldbManager(options);
    Connection c = manager.getConnection();
    PreparedStatement s = null;
    ResultSet rs = null;
    try {
      s = c.prepareStatement("SELECT COUNT(*) FROM " + table);
      rs = s.executeQuery();
      if (!rs.next()) {
        fail("No resultset");
      }
      int realNumRows = rs.getInt(1);
      assertEquals(numRows, realNumRows);
      LOG.info("Expected " + numRows + " rows -- ok.");
    } finally {
      if (null != s) {
        try {
          s.close();
        } catch (SQLException sqlE) {
          LOG.warn("exception: " + sqlE);
        }
      }
      if (null != rs) {
        try {
          rs.close();
        } catch (SQLException sqlE) {
          LOG.warn("exception: " + sqlE);
        }
      }
    }
  }

  /**
   * Insert rows with id = [low, hi) into tableName.
   */
  private void insertIdRows(String tableName, int low, int hi)
      throws SQLException {
    SqoopOptions options = new SqoopOptions();
    options.setConnectString(SOURCE_DB_URL);
    HsqldbManager manager = new HsqldbManager(options);
    Connection c = manager.getConnection();
    PreparedStatement s = null;
    try {
      s = c.prepareStatement("INSERT INTO " + tableName + " VALUES(?)");
      for (int i = low; i < hi; i++) {
        s.setInt(1, i);
        s.executeUpdate();
      }
      c.commit();
    } finally {
      // NOTE(review): if prepareStatement() itself throws, s is still null
      // here and this close() would NPE, masking the original exception.
      s.close();
    }
  }

  /**
   * Insert rows with id = [low, hi) into tableName with
   * the timestamp column set to the specified ts.
   */
  private void insertIdTimestampRows(String tableName, int low, int hi,
      Timestamp ts) throws SQLException {
    LOG.info("Inserting id rows in [" + low + ", " + hi + ") @ " + ts);
    SqoopOptions options = new SqoopOptions();
    options.setConnectString(SOURCE_DB_URL);
    HsqldbManager manager = new HsqldbManager(options);
    Connection c = manager.getConnection();
    PreparedStatement s = null;
    try {
      s = c.prepareStatement("INSERT INTO " + tableName + " VALUES(?,?)");
      for (int i = low; i < hi; i++) {
        s.setInt(1, i);
        s.setTimestamp(2, ts);
        s.executeUpdate();
      }
      c.commit();
    } finally {
      // NOTE(review): same null-close hazard as insertIdRows().
      s.close();
    }
  }

  /**
   * Create a table with an 'id' column full of integers.
   */
  private void createIdTable(String tableName, int insertRows)
      throws SQLException {
    SqoopOptions options = new SqoopOptions();
    options.setConnectString(SOURCE_DB_URL);
    HsqldbManager manager = new HsqldbManager(options);
    Connection c = manager.getConnection();
    PreparedStatement s = null;
    try {
      s = c.prepareStatement("CREATE TABLE " + tableName + "(id INT NOT NULL)");
      s.executeUpdate();
      c.commit();
      insertIdRows(tableName, 0, insertRows);
    } finally {
      s.close();
    }
  }

  /**
   * Create a table with an 'id' column full of integers and a
   * last_modified column with timestamps.
   */
  private void createTimestampTable(String tableName, int insertRows,
      Timestamp baseTime) throws SQLException {
    SqoopOptions options = new SqoopOptions();
    options.setConnectString(SOURCE_DB_URL);
    HsqldbManager manager = new HsqldbManager(options);
    Connection c = manager.getConnection();
    PreparedStatement s = null;
    try {
      s = c.prepareStatement("CREATE TABLE " + tableName + "(id INT NOT NULL, "
          + "last_modified TIMESTAMP)");
      s.executeUpdate();
      c.commit();
      insertIdTimestampRows(tableName, 0, insertRows, baseTime);
    } finally {
      s.close();
    }
  }

  /**
   * Delete all files in a directory for a table.
   */
  public void clearDir(String tableName) {
    try {
      FileSystem fs = FileSystem.getLocal(new Configuration());
      Path warehouse = new Path(BaseSqoopTestCase.LOCAL_WAREHOUSE_DIR);
      Path tableDir = new Path(warehouse, tableName);
      fs.delete(tableDir, true);
    } catch (Exception e) {
      fail("Got unexpected exception: " + StringUtils.stringifyException(e));
    }
  }

  /**
   * Look at a directory that should contain files full of an imported 'id'
   * column. Assert that all numbers in [0, expectedNums) are present
   * in order.
   */
  public void assertDirOfNumbers(String tableName, int expectedNums) {
    try {
      FileSystem fs = FileSystem.getLocal(new Configuration());
      Path warehouse = new Path(BaseSqoopTestCase.LOCAL_WAREHOUSE_DIR);
      Path tableDir = new Path(warehouse, tableName);
      FileStatus [] stats = fs.listStatus(tableDir);
      String [] fileNames = new String[stats.length];
      for (int i = 0; i < stats.length; i++) {
        fileNames[i] = stats[i].getPath().toString();
      }

      Arrays.sort(fileNames);

      // Read all the files in sorted order, adding the value lines to the
      // list.
      List<String> receivedNums = new ArrayList<String>();
      for (String fileName : fileNames) {
        // NOTE(review): fileName here is the *full* path string, so these
        // startsWith checks never match hidden files like "_SUCCESS";
        // compare with assertSpecificNumber(), which extracts getName()
        // first. Confirm whether hidden files can appear in this dir.
        if (fileName.startsWith("_") || fileName.startsWith(".")) {
          continue;
        }

        BufferedReader r = new BufferedReader(
            new InputStreamReader(fs.open(new Path(fileName))));
        try {
          while (true) {
            String s = r.readLine();
            if (null == s) {
              break;
            }

            receivedNums.add(s.trim());
          }
        } finally {
          r.close();
        }
      }

      assertEquals(expectedNums, receivedNums.size());

      // Compare the received values with the expected set.
      for (int i = 0; i < expectedNums; i++) {
        assertEquals((int) i, (int) Integer.valueOf(receivedNums.get(i)));
      }
    } catch (Exception e) {
      fail("Got unexpected exception: " + StringUtils.stringifyException(e));
    }
  }

  /**
   * Assert that a directory contains a file with exactly one line
   * in it, containing the prescribed number 'val'.
   */
  public void assertSpecificNumber(String tableName, int val) {
    try {
      FileSystem fs = FileSystem.getLocal(new Configuration());
      Path warehouse = new Path(BaseSqoopTestCase.LOCAL_WAREHOUSE_DIR);
      Path tableDir = new Path(warehouse, tableName);
      FileStatus [] stats = fs.listStatus(tableDir);
      String [] filePaths = new String[stats.length];
      for (int i = 0; i < stats.length; i++) {
        filePaths[i] = stats[i].getPath().toString();
      }

      // Read the first file that is not a hidden file.
      boolean foundVal = false;
      for (String filePath : filePaths) {
        String fileName = new Path(filePath).getName();
        if (fileName.startsWith("_") || fileName.startsWith(".")) {
          continue;
        }

        if (foundVal) {
          // Make sure we don't have two or more "real" files in the dir.
          fail("Got an extra data-containing file in this directory.");
        }

        BufferedReader r = new BufferedReader(
            new InputStreamReader(fs.open(new Path(filePath))));
        try {
          String s = r.readLine();
          if (null == s) {
            fail("Unexpected empty file " + filePath + ".");
          }
          assertEquals(val, (int) Integer.valueOf(s.trim()));

          String nextLine = r.readLine();
          if (nextLine != null) {
            fail("Expected only one result, but got another line: "
                + nextLine);
          }

          // Successfully got the value we were looking for.
          foundVal = true;
        } finally {
          r.close();
        }
      }
    } catch (IOException e) {
      fail("Got unexpected exception: " + StringUtils.stringifyException(e));
    }
  }

  /** Run an import with the given options/args and assert it succeeded. */
  public void runImport(SqoopOptions options, List<String> args) {
    try {
      Sqoop importer = new Sqoop(new ImportTool(), options.getConf(), options);
      int ret = Sqoop.runSqoop(importer, args.toArray(new String[0]));
      assertEquals("Failure during job", 0, ret);
    } catch (Exception e) {
      LOG.error("Got exception running Sqoop: "
          + StringUtils.stringifyException(e));
      throw new RuntimeException(e);
    }
  }

  /**
   * Return a list of arguments to import the specified table.
   * When isAppend is true, an id-based "--incremental append" import is
   * configured; otherwise a "--incremental lastmodified" import keyed on
   * the last_modified column. Only the id column is imported, with one
   * mapper.
   */
  private List<String> getArgListForTable(String tableName, boolean commonArgs,
      boolean isAppend) {
    List<String> args = new ArrayList<String>();
    if (commonArgs) {
      CommonArgs.addHadoopFlags(args);
    }
    args.add("--connect");
    args.add(SOURCE_DB_URL);
    args.add("--table");
    args.add(tableName);
    args.add("--warehouse-dir");
    args.add(BaseSqoopTestCase.LOCAL_WAREHOUSE_DIR);
    if (isAppend) {
      args.add("--incremental");
      args.add("append");
      args.add("--check-column");
      args.add("id");
    } else {
      args.add("--incremental");
      args.add("lastmodified");
      args.add("--check-column");
      args.add("last_modified");
    }
    args.add("--columns");
    args.add("id");
    args.add("-m");
    args.add("1");

    return args;
  }

  /**
   * Create a job with the specified name, where the job performs
   * an import configured with 'jobArgs'.
   */
  private void createJob(String jobName, List<String> jobArgs) {
    createJob(jobName, jobArgs, newConf());
  }

  /**
   * Create a job with the specified name, where the job performs
   * an import configured with 'jobArgs', using the provided configuration
   * as defaults.
   */
  private void createJob(String jobName, List<String> jobArgs,
      Configuration conf) {
    try {
      SqoopOptions options = new SqoopOptions();
      options.setConf(conf);
      Sqoop makeJob = new Sqoop(new JobTool(), conf, options);

      List<String> args = new ArrayList<String>();
      args.add("--create");
      args.add(jobName);
      args.add("--");
      args.add("import");
      args.addAll(jobArgs);

      int ret = Sqoop.runSqoop(makeJob, args.toArray(new String[0]));
      assertEquals("Failure to create job", 0, ret);
    } catch (Exception e) {
      LOG.error("Got exception running Sqoop to create job: "
          + StringUtils.stringifyException(e));
      throw new RuntimeException(e);
    }
  }

  /**
   * Run the specified job.
   */
  private void runJob(String jobName) {
    runJob(jobName, newConf());
  }

  /**
   * Run the specified job.
   */
  private void runJob(String jobName, Configuration conf) {
    try {
      SqoopOptions options = new SqoopOptions();
      options.setConf(conf);
      Sqoop runJob = new Sqoop(new JobTool(), conf, options);

      List<String> args = new ArrayList<String>();
      args.add("--exec");
      args.add(jobName);

      int ret = Sqoop.runSqoop(runJob, args.toArray(new String[0]));
      assertEquals("Failure to run job", 0, ret);
    } catch (Exception e) {
      LOG.error("Got exception running Sqoop to run job: "
          + StringUtils.stringifyException(e));
      throw new RuntimeException(e);
    }
  }

  // Incremental import of an empty table, no metastore.
  public void testEmptyAppendImport() throws Exception {
    final String TABLE_NAME = "emptyAppend1";
    createIdTable(TABLE_NAME, 0);
    List<String> args = getArgListForTable(TABLE_NAME, true, true);

    Configuration conf = newConf();
    SqoopOptions options = new SqoopOptions();
    options.setConf(conf);
    runImport(options, args);

    assertDirOfNumbers(TABLE_NAME, 0);
  }

  // Incremental import of a filled table, no metastore.
  public void testFullAppendImport() throws Exception {
    final String TABLE_NAME = "fullAppend1";
    createIdTable(TABLE_NAME, 10);
    List<String> args = getArgListForTable(TABLE_NAME, true, true);

    Configuration conf = newConf();
    SqoopOptions options = new SqoopOptions();
    options.setConf(conf);
    runImport(options, args);
    assertDirOfNumbers(TABLE_NAME, 10);
  }

  public void testEmptyJobAppend() throws Exception {
    // Create a job and run an import on an empty table.
    // Nothing should happen.

    final String TABLE_NAME = "emptyJob";
    createIdTable(TABLE_NAME, 0);

    List<String> args = getArgListForTable(TABLE_NAME, false, true);
    createJob("emptyJob", args);
    runJob("emptyJob");
    assertDirOfNumbers(TABLE_NAME, 0);

    // Running the job a second time should result in
    // nothing happening, it's still empty.
    runJob("emptyJob");
    assertDirOfNumbers(TABLE_NAME, 0);
  }

  public void testEmptyThenFullJobAppend() throws Exception {
    // Create an empty table. Import it; nothing happens.
    // Add some rows. Verify they are appended.

    final String TABLE_NAME = "emptyThenFull";
    createIdTable(TABLE_NAME, 0);

    List<String> args = getArgListForTable(TABLE_NAME, false, true);
    createJob(TABLE_NAME, args);
    runJob(TABLE_NAME);
    assertDirOfNumbers(TABLE_NAME, 0);

    // Now add some rows.
    insertIdRows(TABLE_NAME, 0, 10);

    // Running the job a second time should import 10 rows.
    runJob(TABLE_NAME);
    assertDirOfNumbers(TABLE_NAME, 10);

    // Add some more rows.
    insertIdRows(TABLE_NAME, 10, 20);

    // Import only those rows.
    runJob(TABLE_NAME);
    assertDirOfNumbers(TABLE_NAME, 20);
  }

  public void testAppend() throws Exception {
    // Create a table with data in it; import it.
    // Then add more data, verify that only the incremental data is pulled.

    final String TABLE_NAME = "append";
    createIdTable(TABLE_NAME, 10);

    List<String> args = getArgListForTable(TABLE_NAME, false, true);
    createJob(TABLE_NAME, args);
    runJob(TABLE_NAME);
    assertDirOfNumbers(TABLE_NAME, 10);

    // Add some more rows.
    insertIdRows(TABLE_NAME, 10, 20);

    // Import only those rows.
    runJob(TABLE_NAME);
    assertDirOfNumbers(TABLE_NAME, 20);
  }

  public void testEmptyLastModified() throws Exception {
    final String TABLE_NAME = "emptyLastModified";
    createTimestampTable(TABLE_NAME, 0, null);
    List<String> args = getArgListForTable(TABLE_NAME, true, false);

    Configuration conf = newConf();
    SqoopOptions options = new SqoopOptions();
    options.setConf(conf);
    runImport(options, args);

    assertDirOfNumbers(TABLE_NAME, 0);
  }

  public void testFullLastModifiedImport() throws Exception {
    // Given a table of rows imported in the past,
    // see that they are imported.
    final String TABLE_NAME = "fullLastModified";
    Timestamp thePast = new Timestamp(System.currentTimeMillis() - 100);
    createTimestampTable(TABLE_NAME, 10, thePast);

    List<String> args = getArgListForTable(TABLE_NAME, true, false);

    Configuration conf = newConf();
    SqoopOptions options = new SqoopOptions();
    options.setConf(conf);
    runImport(options, args);

    assertDirOfNumbers(TABLE_NAME, 10);
  }

  public void testNoImportFromTheFuture() throws Exception {
    // If last-modified dates for writes are serialized to be in the
    // future w.r.t. an import, do not import these rows.

    final String TABLE_NAME = "futureLastModified";
    Timestamp theFuture = new Timestamp(System.currentTimeMillis() + 1000000);
    createTimestampTable(TABLE_NAME, 10, theFuture);

    List<String> args = getArgListForTable(TABLE_NAME, true, false);

    Configuration conf = newConf();
    SqoopOptions options = new SqoopOptions();
    options.setConf(conf);
    runImport(options, args);

    assertDirOfNumbers(TABLE_NAME, 0);
  }

  public void testEmptyJobLastMod() throws Exception {
    // Create a job and run an import on an empty table.
    // Nothing should happen.

    final String TABLE_NAME = "emptyJobLastMod";
    createTimestampTable(TABLE_NAME, 0, null);

    List<String> args = getArgListForTable(TABLE_NAME, false, false);
    args.add("--append");
    createJob("emptyJobLastMod", args);
    runJob("emptyJobLastMod");
    assertDirOfNumbers(TABLE_NAME, 0);

    // Running the job a second time should result in
    // nothing happening, it's still empty.
    runJob("emptyJobLastMod");
    assertDirOfNumbers(TABLE_NAME, 0);
  }

  public void testEmptyThenFullJobLastMod() throws Exception {
    // Create an empty table. Import it; nothing happens.
    // Add some rows. Verify they are appended.

    final String TABLE_NAME = "emptyThenFullTimestamp";
    createTimestampTable(TABLE_NAME, 0, null);

    List<String> args = getArgListForTable(TABLE_NAME, false, false);
    args.add("--append");
    createJob(TABLE_NAME, args);
    runJob(TABLE_NAME);
    assertDirOfNumbers(TABLE_NAME, 0);

    long importWasBefore = System.currentTimeMillis();

    // Let some time elapse.
    Thread.sleep(50);

    long rowsAddedTime = System.currentTimeMillis() - 5;

    // Check: we are adding rows after the previous import time
    // and before the current time.
    // NOTE(review): these sleep-based assertions assume the wall clock
    // advances monotonically; a clock step could make them flaky.
    assertTrue(rowsAddedTime > importWasBefore);
    assertTrue(rowsAddedTime < System.currentTimeMillis());

    insertIdTimestampRows(TABLE_NAME, 0, 10, new Timestamp(rowsAddedTime));

    // Running the job a second time should import 10 rows.
    runJob(TABLE_NAME);
    assertDirOfNumbers(TABLE_NAME, 10);

    // Add some more rows.
    importWasBefore = System.currentTimeMillis();
    Thread.sleep(50);
    rowsAddedTime = System.currentTimeMillis() - 5;
    assertTrue(rowsAddedTime > importWasBefore);
    assertTrue(rowsAddedTime < System.currentTimeMillis());
    insertIdTimestampRows(TABLE_NAME, 10, 20, new Timestamp(rowsAddedTime));

    // Import only those rows.
    runJob(TABLE_NAME);
    assertDirOfNumbers(TABLE_NAME, 20);
  }

  public void testAppendWithTimestamp() throws Exception {
    // Create a table with data in it; import it.
    // Then add more data, verify that only the incremental data is pulled.

    final String TABLE_NAME = "appendTimestamp";
    Timestamp thePast = new Timestamp(System.currentTimeMillis() - 100);
    createTimestampTable(TABLE_NAME, 10, thePast);

    List<String> args = getArgListForTable(TABLE_NAME, false, false);
    args.add("--append");
    createJob(TABLE_NAME, args);
    runJob(TABLE_NAME);
    assertDirOfNumbers(TABLE_NAME, 10);

    // Add some more rows.
    long importWasBefore = System.currentTimeMillis();
    Thread.sleep(50);
    long rowsAddedTime = System.currentTimeMillis() - 5;
    assertTrue(rowsAddedTime > importWasBefore);
    assertTrue(rowsAddedTime < System.currentTimeMillis());
    insertIdTimestampRows(TABLE_NAME, 10, 20, new Timestamp(rowsAddedTime));

    // Import only those rows.
    runJob(TABLE_NAME);
    assertDirOfNumbers(TABLE_NAME, 20);
  }

  public void testModifyWithTimestamp() throws Exception {
    // Create a table with data in it; import it.
    // Then modify some existing rows, and verify that we only grab
    // those rows.

    final String TABLE_NAME = "modifyTimestamp";
    Timestamp thePast = new Timestamp(System.currentTimeMillis() - 100);
    createTimestampTable(TABLE_NAME, 10, thePast);

    List<String> args = getArgListForTable(TABLE_NAME, false, false);
    createJob(TABLE_NAME, args);
    runJob(TABLE_NAME);
    assertDirOfNumbers(TABLE_NAME, 10);

    // Modify a row.
    long importWasBefore = System.currentTimeMillis();
    Thread.sleep(50);
    long rowsAddedTime = System.currentTimeMillis() - 5;
    assertTrue(rowsAddedTime > importWasBefore);
    assertTrue(rowsAddedTime < System.currentTimeMillis());
    SqoopOptions options = new SqoopOptions();
    options.setConnectString(SOURCE_DB_URL);
    HsqldbManager manager = new HsqldbManager(options);
    Connection c = manager.getConnection();
    PreparedStatement s = null;
    try {
      s = c.prepareStatement("UPDATE " + TABLE_NAME
          + " SET id=?, last_modified=? WHERE id=?");
      s.setInt(1, 4000); // the first row should have '4000' in it now.
      s.setTimestamp(2, new Timestamp(rowsAddedTime));
      s.setInt(3, 0);
      s.executeUpdate();
      c.commit();
    } finally {
      s.close();
    }

    // Import only the new row.
    clearDir(TABLE_NAME);
    runJob(TABLE_NAME);
    assertSpecificNumber(TABLE_NAME, 4000);
  }

  /**
   * ManagerFactory returning an HSQLDB ConnManager which allows you to
   * specify the current database timestamp.
   */
  public static class InstrumentHsqldbManagerFactory extends ManagerFactory {
    @Override
    public ConnManager accept(JobData data) {
      LOG.info("Using instrumented manager");
      return new InstrumentHsqldbManager(data.getSqoopOptions());
    }
  }

  /**
   * Hsqldb ConnManager that lets you set the current reported timestamp
   * from the database, to allow testing of boundary conditions for imports.
   */
  public static class InstrumentHsqldbManager extends HsqldbManager {
    // Static, so it is shared across every instance the factory hands out.
    private static Timestamp curTimestamp;

    public InstrumentHsqldbManager(SqoopOptions options) {
      super(options);
    }

    @Override
    public Timestamp getCurrentDbTimestamp() {
      return InstrumentHsqldbManager.curTimestamp;
    }

    public static void setCurrentDbTimestamp(Timestamp t) {
      InstrumentHsqldbManager.curTimestamp = t;
    }
  }

  public void testTimestampBoundary() throws Exception {
    // Run an import, and then insert rows with the last-modified timestamp
    // set to the exact time when the first import runs. Run a second import
    // and ensure that we pick up the new data.

    long now = System.currentTimeMillis();

    final String TABLE_NAME = "boundaryTimestamp";
    Timestamp thePast = new Timestamp(now - 100);
    createTimestampTable(TABLE_NAME, 10, thePast);

    Timestamp firstJobTime = new Timestamp(now);
    InstrumentHsqldbManager.setCurrentDbTimestamp(firstJobTime);

    // Configure the job to use the instrumented Hsqldb manager.
    Configuration conf = newConf();
    conf.set(ConnFactory.FACTORY_CLASS_NAMES_KEY,
        InstrumentHsqldbManagerFactory.class.getName());

    List<String> args = getArgListForTable(TABLE_NAME, false, false);
    args.add("--append");
    createJob(TABLE_NAME, args, conf);
    runJob(TABLE_NAME);
    assertDirOfNumbers(TABLE_NAME, 10);

    // Add some more rows with the timestamp equal to the job run timestamp.
    insertIdTimestampRows(TABLE_NAME, 10, 20, firstJobTime);
    assertRowCount(TABLE_NAME, 20);

    // Run a second job with the clock advanced by 100 ms.
    Timestamp secondJobTime = new Timestamp(now + 100);
    InstrumentHsqldbManager.setCurrentDbTimestamp(secondJobTime);

    // Import only those rows.
    runJob(TABLE_NAME);
    assertDirOfNumbers(TABLE_NAME, 20);
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.get; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.*; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Base64; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.test.ElasticsearchIntegrationTest; import org.elasticsearch.test.junit.annotations.TestLogging; import org.junit.Test; import java.io.IOException; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; 
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.*;

/**
 * Integration tests for the get and multi-get APIs: realtime vs. non-realtime
 * gets, source/field filtering, versioned gets, and several backcompat paths.
 * Indices are created with {@code index.refresh_interval = -1} so that realtime
 * gets must be answered before any refresh makes documents searchable.
 */
public class GetActionTests extends ElasticsearchIntegrationTest {

    @Test
    public void simpleGetTests() {
        // Refresh disabled: documents are only visible to realtime gets until flush().
        assertAcked(prepareCreate("test")
                .setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1))
                .addAlias(new Alias("alias")));
        ensureGreen();

        // Missing doc: exists == false.
        GetResponse response = client().prepareGet(indexOrAlias(), "type1", "1").get();
        assertThat(response.isExists(), equalTo(false));

        logger.info("--> index doc 1");
        client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get();

        // Realtime get sees the doc even though the index was never refreshed.
        logger.info("--> realtime get 1");
        response = client().prepareGet(indexOrAlias(), "type1", "1").get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1"));
        assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));

        // Requesting an empty field list implicitly suppresses the source.
        logger.info("--> realtime get 1 (no source, implicit)");
        response = client().prepareGet(indexOrAlias(), "type1", "1").setFields(Strings.EMPTY_ARRAY).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getFields().size(), equalTo(0));
        assertThat(response.getSourceAsBytes(), nullValue());

        // Explicitly disabling source fetch has the same effect.
        logger.info("--> realtime get 1 (no source, explicit)");
        response = client().prepareGet(indexOrAlias(), "type1", "1").setFetchSource(false).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getFields().size(), equalTo(0));
        assertThat(response.getSourceAsBytes(), nullValue());

        // A null type matches any type.
        logger.info("--> realtime get 1 (no type)");
        response = client().prepareGet(indexOrAlias(), null, "1").get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1"));
        assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));

        // Non-realtime get cannot see the doc before a refresh/flush.
        logger.info("--> non realtime get 1");
        response = client().prepareGet(indexOrAlias(), "type1", "1").setRealtime(false).get();
        assertThat(response.isExists(), equalTo(false));

        // Field fetch forces the source to be parsed; source itself is not returned.
        logger.info("--> realtime fetch of field (requires fetching parsing source)");
        response = client().prepareGet(indexOrAlias(), "type1", "1").setFields("field1").get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsBytes(), nullValue());
        assertThat(response.getField("field1").getValues().get(0).toString(), equalTo("value1"));
        assertThat(response.getField("field2"), nullValue());

        // Field fetch plus filtered source: only field1 survives the source filter.
        logger.info("--> realtime fetch of field & source (requires fetching parsing source)");
        response = client().prepareGet(indexOrAlias(), "type1", "1").setFields("field1").setFetchSource("field1", null).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsMap(), hasKey("field1"));
        assertThat(response.getSourceAsMap(), not(hasKey("field2")));
        assertThat(response.getField("field1").getValues().get(0).toString(), equalTo("value1"));
        assertThat(response.getField("field2"), nullValue());

        logger.info("--> flush the index, so we load it from it");
        flush();

        // Same set of checks, now served from the Lucene index instead of the translog.
        logger.info("--> realtime get 1 (loaded from index)");
        response = client().prepareGet(indexOrAlias(), "type1", "1").get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1"));
        assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));

        logger.info("--> non realtime get 1 (loaded from index)");
        response = client().prepareGet(indexOrAlias(), "type1", "1").setRealtime(false).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1"));
        assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));

        logger.info("--> realtime fetch of field (loaded from index)");
        response = client().prepareGet(indexOrAlias(), "type1", "1").setFields("field1").get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsBytes(), nullValue());
        assertThat(response.getField("field1").getValues().get(0).toString(), equalTo("value1"));
        assertThat(response.getField("field2"), nullValue());

        logger.info("--> realtime fetch of field & source (loaded from index)");
        response = client().prepareGet(indexOrAlias(), "type1", "1").setFields("field1").setFetchSource(true).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsBytes(), not(nullValue()));
        assertThat(response.getField("field1").getValues().get(0).toString(), equalTo("value1"));
        assertThat(response.getField("field2"), nullValue());

        // Updates must be visible to realtime gets immediately.
        logger.info("--> update doc 1");
        client().prepareIndex("test", "type1", "1").setSource("field1", "value1_1", "field2", "value2_1").get();

        logger.info("--> realtime get 1");
        response = client().prepareGet(indexOrAlias(), "type1", "1").get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1_1"));
        assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2_1"));

        logger.info("--> update doc 1 again");
        client().prepareIndex("test", "type1", "1").setSource("field1", "value1_2", "field2", "value2_2").get();

        response = client().prepareGet(indexOrAlias(), "type1", "1").get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1_2"));
        assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2_2"));

        // Delete is likewise visible to realtime gets immediately.
        DeleteResponse deleteResponse = client().prepareDelete("test", "type1", "1").get();
        assertThat(deleteResponse.isFound(), equalTo(true));

        response = client().prepareGet(indexOrAlias(), "type1", "1").get();
        assertThat(response.isExists(), equalTo(false));
    }

    // Randomly addresses the index by its real name or its alias so both paths are exercised.
    private static String indexOrAlias() {
        return randomBoolean() ? "test" : "alias";
    }

    @Test
    public void simpleMultiGetTests() throws Exception {
        assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
                .setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1)));
        ensureGreen();

        // Multi-get for a missing doc still returns one (non-existing) item.
        MultiGetResponse response = client().prepareMultiGet().add(indexOrAlias(), "type1", "1").get();
        assertThat(response.getResponses().length, equalTo(1));
        assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(false));

        for (int i = 0; i < 10; i++) {
            client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value" + i).get();
        }

        // Mix of existing (1, 3, 9) and missing (15, 11) ids; response order must match request order.
        response = client().prepareMultiGet()
                .add(indexOrAlias(), "type1", "1")
                .add(indexOrAlias(), "type1", "15")
                .add(indexOrAlias(), "type1", "3")
                .add(indexOrAlias(), "type1", "9")
                .add(indexOrAlias(), "type1", "11").get();
        assertThat(response.getResponses().length, equalTo(5));
        assertThat(response.getResponses()[0].getId(), equalTo("1"));
        assertThat(response.getResponses()[0].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[0].getResponse().getIndex(), equalTo("test"));
        assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[0].getResponse().getSourceAsMap().get("field").toString(), equalTo("value1"));
        assertThat(response.getResponses()[1].getId(), equalTo("15"));
        assertThat(response.getResponses()[1].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[1].getResponse().getIndex(), equalTo("test"));
        assertThat(response.getResponses()[1].getResponse().isExists(),
                equalTo(false));
        assertThat(response.getResponses()[2].getId(), equalTo("3"));
        assertThat(response.getResponses()[2].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[2].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[3].getId(), equalTo("9"));
        assertThat(response.getResponses()[3].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[3].getResponse().getIndex(), equalTo("test"));
        assertThat(response.getResponses()[3].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[4].getId(), equalTo("11"));
        assertThat(response.getResponses()[4].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[4].getResponse().getIndex(), equalTo("test"));
        assertThat(response.getResponses()[4].getResponse().isExists(), equalTo(false));

        // multi get with specific field
        response = client().prepareMultiGet()
                .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").fields("field"))
                .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "3").fields("field"))
                .get();

        // Field-only items return the field values but no source bytes.
        assertThat(response.getResponses().length, equalTo(2));
        assertThat(response.getResponses()[0].getResponse().getSourceAsBytes(), nullValue());
        assertThat(response.getResponses()[0].getResponse().getField("field").getValues().get(0).toString(), equalTo("value1"));
    }

    /**
     * Realtime get against an index created as 1.4.2 with source compression
     * enabled (backcompat path). The large (10k-char) source exercises the
     * compressed-source decode on the translog read.
     */
    @Test
    public void realtimeGetWithCompressBackcompat() throws Exception {
        assertAcked(prepareCreate("test")
                .setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1).put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id))
                .addMapping("type", jsonBuilder().startObject().startObject("type").startObject("_source").field("compress", true).endObject().endObject().endObject()));
        ensureGreen();

        // Build a 10000-char value so the stored source is actually worth compressing.
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < 10000; i++) {
            sb.append((char) i);
        }
        String fieldValue = sb.toString();
        client().prepareIndex("test", "type", "1").setSource("field", fieldValue).get();

        // realtime get
        GetResponse getResponse = client().prepareGet("test", "type", "1").get();
        assertThat(getResponse.isExists(), equalTo(true));
        assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo(fieldValue));
    }

    /**
     * Compares field values when extracted from the source (type1 has no stored
     * fields mapped) versus read from stored fields (type2 stores every field).
     * Note the numeric types differ by path: source extraction yields Long/String,
     * stored fields yield the mapped types (Integer, BytesReference for binary).
     */
    @Test
    public void getFieldsWithDifferentTypes() throws Exception {
        assertAcked(prepareCreate("test").setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1))
                .addMapping("type1", jsonBuilder().startObject().startObject("type1").endObject().endObject())
                .addMapping("type2", jsonBuilder().startObject().startObject("type2")
                        .startObject("properties")
                        .startObject("str").field("type", "string").field("store", "yes").endObject()
                        .startObject("strs").field("type", "string").field("store", "yes").endObject()
                        .startObject("int").field("type", "integer").field("store", "yes").endObject()
                        .startObject("ints").field("type", "integer").field("store", "yes").endObject()
                        .startObject("date").field("type", "date").field("store", "yes").endObject()
                        .startObject("binary").field("type", "binary").field("store", "yes").endObject()
                        .endObject()
                        .endObject().endObject()));
        ensureGreen();

        // Identical documents into both types so only the mapping differs.
        client().prepareIndex("test", "type1", "1").setSource(
                jsonBuilder().startObject()
                        .field("str", "test")
                        .field("strs", new String[]{"A", "B", "C"})
                        .field("int", 42)
                        .field("ints", new int[]{1, 2, 3, 4})
                        .field("date", "2012-11-13T15:26:14.000Z")
                        .field("binary", Base64.encodeBytes(new byte[]{1, 2, 3}))
                        .endObject()).get();

        client().prepareIndex("test", "type2", "1").setSource(
                jsonBuilder().startObject()
                        .field("str", "test")
                        .field("strs", new String[]{"A", "B", "C"})
                        .field("int", 42)
                        .field("ints", new int[]{1, 2, 3, 4})
                        .field("date", "2012-11-13T15:26:14.000Z")
                        .field("binary", Base64.encodeBytes(new byte[]{1, 2, 3}))
                        .endObject()).get();

        // realtime get with stored source
        logger.info("--> realtime get (from source)");
        GetResponse getResponse = client().prepareGet("test", "type1", "1").setFields("str", "strs", "int", "ints", "date", "binary").get();
        assertThat(getResponse.isExists(), equalTo(true));
        assertThat((String) getResponse.getField("str").getValue(), equalTo("test"));
        assertThat(getResponse.getField("strs").getValues(), contains((Object) "A", "B", "C"));
        // Source extraction parses JSON numbers as Long, not Integer.
        assertThat((Long) getResponse.getField("int").getValue(), equalTo(42l));
        assertThat(getResponse.getField("ints").getValues(), contains((Object) 1L, 2L, 3L, 4L));
        assertThat((String) getResponse.getField("date").getValue(), equalTo("2012-11-13T15:26:14.000Z"));
        assertThat(getResponse.getField("binary").getValue(), instanceOf(String.class)); // its a String..., not binary mapped

        logger.info("--> realtime get (from stored fields)");
        getResponse = client().prepareGet("test", "type2", "1").setFields("str", "strs", "int", "ints", "date", "binary").get();
        assertThat(getResponse.isExists(), equalTo(true));
        assertThat((String) getResponse.getField("str").getValue(), equalTo("test"));
        assertThat(getResponse.getField("strs").getValues(), contains((Object) "A", "B", "C"));
        // Stored-field path returns the mapped integer type.
        assertThat((Integer) getResponse.getField("int").getValue(), equalTo(42));
        assertThat(getResponse.getField("ints").getValues(), contains((Object) 1, 2, 3, 4));
        assertThat((String) getResponse.getField("date").getValue(), equalTo("2012-11-13T15:26:14.000Z"));
        assertThat((BytesReference) getResponse.getField("binary").getValue(), equalTo((BytesReference) new BytesArray(new byte[]{1, 2, 3})));

        logger.info("--> flush the index, so we load it from it");
        flush();

        // Same expectations after flush, when values come from the Lucene index.
        logger.info("--> non realtime get (from source)");
        getResponse = client().prepareGet("test", "type1", "1").setFields("str", "strs", "int", "ints", "date", "binary").get();
        assertThat(getResponse.isExists(), equalTo(true));
        assertThat((String) getResponse.getField("str").getValue(), equalTo("test"));
        assertThat(getResponse.getField("strs").getValues(), contains((Object) "A", "B", "C"));
        assertThat((Long) getResponse.getField("int").getValue(), equalTo(42l));
        assertThat(getResponse.getField("ints").getValues(), contains((Object) 1L, 2L, 3L, 4L));
        assertThat((String)
                getResponse.getField("date").getValue(), equalTo("2012-11-13T15:26:14.000Z"));
        assertThat(getResponse.getField("binary").getValue(), instanceOf(String.class)); // its a String..., not binary mapped

        logger.info("--> non realtime get (from stored fields)");
        getResponse = client().prepareGet("test", "type2", "1").setFields("str", "strs", "int", "ints", "date", "binary").get();
        assertThat(getResponse.isExists(), equalTo(true));
        assertThat((String) getResponse.getField("str").getValue(), equalTo("test"));
        assertThat(getResponse.getField("strs").getValues(), contains((Object) "A", "B", "C"));
        assertThat((Integer) getResponse.getField("int").getValue(), equalTo(42));
        assertThat(getResponse.getField("ints").getValues(), contains((Object) 1, 2, 3, 4));
        assertThat((String) getResponse.getField("date").getValue(), equalTo("2012-11-13T15:26:14.000Z"));
        assertThat((BytesReference) getResponse.getField("binary").getValue(), equalTo((BytesReference) new BytesArray(new byte[]{1, 2, 3})));
    }

    /**
     * A multi-valued field must return all of its values, in order, both when
     * fetched via source parsing (pre-refresh) and from stored fields (post-refresh).
     */
    @Test
    public void testGetDocWithMultivaluedFields() throws Exception {
        String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("properties")
                .startObject("field").field("type", "string").field("store", "yes").endObject()
                .endObject()
                .endObject().endObject().string();
        String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type2")
                .startObject("properties")
                .startObject("field").field("type", "string").field("store", "yes").endObject()
                .endObject()
                .endObject().endObject().string();
        assertAcked(prepareCreate("test")
                .addMapping("type1", mapping1)
                .addMapping("type2", mapping2)
                .setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1)));
        ensureGreen();

        GetResponse response = client().prepareGet("test", "type1", "1").get();
        assertThat(response.isExists(), equalTo(false));
        response = client().prepareGet("test", "type2", "1").get();
        assertThat(response.isExists(), equalTo(false));

        // field(...) with two values indexes a multi-valued field.
        client().prepareIndex("test", "type1", "1")
                .setSource(jsonBuilder().startObject().field("field", "1", "2").endObject()).get();

        client().prepareIndex("test", "type2", "1")
                .setSource(jsonBuilder().startObject().field("field", "1", "2").endObject()).get();

        // Values fetched by parsing the source (no refresh has happened yet).
        response = client().prepareGet("test", "type1", "1").setFields("field").get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getType(), equalTo("type1"));
        assertThat(response.getFields().size(), equalTo(1));
        assertThat(response.getFields().get("field").getValues().size(), equalTo(2));
        assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1"));
        assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2"));

        response = client().prepareGet("test", "type2", "1").setFields("field").get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getType(), equalTo("type2"));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getFields().size(), equalTo(1));
        assertThat(response.getFields().get("field").getValues().size(), equalTo(2));
        assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1"));
        assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2"));

        // Now test values being fetched from stored fields.
        refresh();
        response = client().prepareGet("test", "type1", "1").setFields("field").get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getFields().size(), equalTo(1));
        assertThat(response.getFields().get("field").getValues().size(), equalTo(2));
        assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1"));
        assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2"));

        response = client().prepareGet("test", "type2", "1").setFields("field").get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getFields().size(), equalTo(1));
        assertThat(response.getFields().get("field").getValues().size(), equalTo(2));
        assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1"));
        assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2"));
    }

    /**
     * Backcompat (index created as 1.4.2): a get served from the translog must
     * honor the mapping's _source excludes, and match the post-flush result.
     */
    @Test
    public void testThatGetFromTranslogShouldWorkWithExcludeBackcompat() throws Exception {
        String index = "test";
        String type = "type1";

        String mapping = jsonBuilder()
                .startObject()
                .startObject(type)
                .startObject("_source")
                .array("excludes", "excluded")
                .endObject()
                .endObject()
                .endObject()
                .string();

        assertAcked(prepareCreate(index)
                .addMapping(type, mapping)
                .setSettings("index.refresh_interval", -1, IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id));

        client().prepareIndex(index, type, "1")
                .setSource(jsonBuilder().startObject().field("field", "1", "2").field("excluded", "should not be seen").endObject())
                .get();

        // Before the flush the get reads from the translog; after it, from the index.
        GetResponse responseBeforeFlush = client().prepareGet(index, type, "1").get();
        client().admin().indices().prepareFlush(index).get();
        GetResponse responseAfterFlush = client().prepareGet(index, type, "1").get();

        assertThat(responseBeforeFlush.isExists(), is(true));
        assertThat(responseAfterFlush.isExists(), is(true));
        assertThat(responseBeforeFlush.getSourceAsMap(),
                hasKey("field"));
        assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("excluded")));
        // Translog-served and index-served gets must return identical filtered source.
        assertThat(responseBeforeFlush.getSourceAsString(), is(responseAfterFlush.getSourceAsString()));
    }

    /**
     * Backcompat (index created as 1.4.2): a get served from the translog must
     * honor the mapping's _source includes, and match the post-flush result.
     */
    @Test
    public void testThatGetFromTranslogShouldWorkWithIncludeBackcompat() throws Exception {
        String index = "test";
        String type = "type1";

        String mapping = jsonBuilder()
                .startObject()
                .startObject(type)
                .startObject("_source")
                .array("includes", "included")
                .endObject()
                .endObject()
                .endObject()
                .string();

        assertAcked(prepareCreate(index)
                .addMapping(type, mapping)
                .setSettings("index.refresh_interval", -1, IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id));

        client().prepareIndex(index, type, "1")
                .setSource(jsonBuilder().startObject().field("field", "1", "2").field("included", "should be seen").endObject())
                .get();

        GetResponse responseBeforeFlush = client().prepareGet(index, type, "1").get();
        flush();
        GetResponse responseAfterFlush = client().prepareGet(index, type, "1").get();

        assertThat(responseBeforeFlush.isExists(), is(true));
        assertThat(responseAfterFlush.isExists(), is(true));
        // Only the included field survives; everything else is filtered out.
        assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("field")));
        assertThat(responseBeforeFlush.getSourceAsMap(), hasKey("included"));
        assertThat(responseBeforeFlush.getSourceAsString(), is(responseAfterFlush.getSourceAsString()));
    }

    /**
     * Backcompat (index created as 1.4.2): combines mapping-level _source
     * includes/excludes with request-level field fetches and extra request-level
     * source filtering, before and after a flush.
     */
    @SuppressWarnings("unchecked")
    @Test
    public void testThatGetFromTranslogShouldWorkWithIncludeExcludeAndFieldsBackcompat() throws Exception {
        String index = "test";
        String type = "type1";

        String mapping = jsonBuilder()
                .startObject()
                .startObject(type)
                .startObject("_source")
                .array("includes", "included")
                .array("excludes", "excluded")
                .endObject()
                .endObject()
                .endObject()
                .string();

        assertAcked(prepareCreate(index)
                .addMapping(type, mapping)
                .setSettings("index.refresh_interval", -1, IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id));

        client().prepareIndex(index, type, "1")
                .setSource(jsonBuilder().startObject()
                        .field("field", "1", "2")
                        .startObject("included").field("field", "should be seen").field("field2", "extra field to remove").endObject()
                        .startObject("excluded").field("field", "should not be seen").field("field2", "should not be seen").endObject()
                        .endObject())
                .get();

        GetResponse responseBeforeFlush = client().prepareGet(index, type, "1").setFields("_source", "included.field", "excluded.field").get();
        assertThat(responseBeforeFlush.isExists(), is(true));
        assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("excluded")));
        assertThat(responseBeforeFlush.getSourceAsMap(), not(hasKey("field")));
        assertThat(responseBeforeFlush.getSourceAsMap(), hasKey("included"));

        // now tests that extra source filtering works as expected
        GetResponse responseBeforeFlushWithExtraFilters = client().prepareGet(index, type, "1").setFields("included.field", "excluded.field")
                .setFetchSource(new String[]{"field", "*.field"}, new String[]{"*.field2"}).get();
        assertThat(responseBeforeFlushWithExtraFilters.isExists(), is(true));
        assertThat(responseBeforeFlushWithExtraFilters.getSourceAsMap(), not(hasKey("excluded")));
        assertThat(responseBeforeFlushWithExtraFilters.getSourceAsMap(), not(hasKey("field")));
        assertThat(responseBeforeFlushWithExtraFilters.getSourceAsMap(), hasKey("included"));
        // Request-level filter further narrows the mapped include: field2 is stripped.
        assertThat((Map<String, Object>) responseBeforeFlushWithExtraFilters.getSourceAsMap().get("included"), hasKey("field"));
        assertThat((Map<String, Object>) responseBeforeFlushWithExtraFilters.getSourceAsMap().get("included"), not(hasKey("field2")));

        flush();
        GetResponse responseAfterFlush = client().prepareGet(index, type, "1").setFields("_source", "included.field", "excluded.field").get();
        GetResponse responseAfterFlushWithExtraFilters = client().prepareGet(index, type, "1").setFields("included.field", "excluded.field")
                .setFetchSource("*.field", "*.field2").get();

        assertThat(responseAfterFlush.isExists(), is(true));
        assertThat(responseBeforeFlush.getSourceAsString(), is(responseAfterFlush.getSourceAsString()));

        assertThat(responseAfterFlushWithExtraFilters.isExists(), is(true));
        assertThat(responseBeforeFlushWithExtraFilters.getSourceAsString(), is(responseAfterFlushWithExtraFilters.getSourceAsString()));
    }

    /**
     * Versioned gets: MATCH_ANY and the current version succeed; a non-current
     * version raises VersionConflictEngineException. Checked on both the
     * translog (realtime) and Lucene-index (non-realtime) paths.
     */
    @Test
    public void testGetWithVersion() {
        assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
                .setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1)));
        ensureGreen();

        GetResponse response = client().prepareGet("test", "type1", "1").get();
        assertThat(response.isExists(), equalTo(false));

        logger.info("--> index doc 1");
        client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get();

        // From translog:
        response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getVersion(), equalTo(1l));

        response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getVersion(), equalTo(1l));

        // Wrong version (2) must be rejected.
        try {
            client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).get();
            fail();
        } catch (VersionConflictEngineException e) {
            //all good
        }

        // From Lucene index:
        refresh();

        response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getVersion(), equalTo(1l));

        response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).setRealtime(false).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getVersion(), equalTo(1l));

        try {
            client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).setRealtime(false).get();
            fail();
        } catch (VersionConflictEngineException e) {
            //all good
        }

        // Reindexing the same id bumps the version to 2; version 1 must now conflict.
        logger.info("--> index doc 1 again, so increasing the version");
        client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get();

        // From translog:
        response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getVersion(), equalTo(2l));

        try {
            client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).get();
            fail();
        } catch (VersionConflictEngineException e) {
            //all good
        }

        response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getVersion(), equalTo(2l));

        // From Lucene index:
        refresh();

        response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getVersion(), equalTo(2l));

        try {
            client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).setRealtime(false).get();
            fail();
        } catch (VersionConflictEngineException e) {
            //all good
        }

        response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).setRealtime(false).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getVersion(), equalTo(2l));
    }

    /**
     * Multi-get with per-item versions: conflicting versions fail only their own
     * item (reported via getFailure()) while the other items still succeed.
     * Exercised on both the translog and Lucene-index paths, at version 1 and
     * again after reindexing bumps everything to version 2.
     */
    @Test
    public void testMultiGetWithVersion() throws Exception {
        assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
                .setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1)));
        ensureGreen();

        MultiGetResponse response = client().prepareMultiGet().add(indexOrAlias(), "type1", "1").get();
        assertThat(response.getResponses().length, equalTo(1));
        assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(false));

        for (int i = 0; i < 3; i++) {
            client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value" + i).get();
        }

        // Version from translog
        response = client().prepareMultiGet()
                .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(Versions.MATCH_ANY))
                .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(1))
                .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(2))
                .get();
        assertThat(response.getResponses().length, equalTo(3));
        // [0] version doesn't matter, which is the default
        assertThat(response.getResponses()[0].getFailure(), nullValue());
        assertThat(response.getResponses()[0].getId(), equalTo("1"));
        assertThat(response.getResponses()[0].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[0].getResponse().getSourceAsMap().get("field").toString(), equalTo("value1"));
        assertThat(response.getResponses()[1].getId(), equalTo("1"));
        assertThat(response.getResponses()[1].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[1].getFailure(), nullValue());
        assertThat(response.getResponses()[1].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[1].getResponse().getSourceAsMap().get("field").toString(), equalTo("value1"));
        // [2] asked for version 2 while the doc is at version 1 -> per-item failure.
        assertThat(response.getResponses()[2].getFailure(), notNullValue());
        assertThat(response.getResponses()[2].getFailure().getId(), equalTo("1"));
        assertThat(response.getResponses()[2].getFailure().getMessage(), startsWith("VersionConflictEngineException"));

        //Version from Lucene index
        refresh();
        response = client().prepareMultiGet()
                .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(Versions.MATCH_ANY))
                .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(1))
                .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(2))
                .setRealtime(false)
                .get();
        assertThat(response.getResponses().length, equalTo(3));
        // [0] version doesn't matter, which is the default
        assertThat(response.getResponses()[0].getFailure(), nullValue());
        assertThat(response.getResponses()[0].getId(), equalTo("1"));
        assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[0].getResponse().getSourceAsMap().get("field").toString(), equalTo("value1"));
        assertThat(response.getResponses()[1].getId(), equalTo("1"));
        assertThat(response.getResponses()[1].getFailure(), nullValue());
        assertThat(response.getResponses()[1].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[1].getResponse().getSourceAsMap().get("field").toString(), equalTo("value1"));
        assertThat(response.getResponses()[2].getFailure(), notNullValue());
        assertThat(response.getResponses()[2].getFailure().getId(), equalTo("1"));
        assertThat(response.getResponses()[2].getFailure().getMessage(), startsWith("VersionConflictEngineException"));

        // Reindex all docs: versions move from 1 to 2, flipping which items conflict.
        for (int i = 0; i < 3; i++) {
            client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value" + i).get();
        }

        // Version from translog
        response = client().prepareMultiGet()
                .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(Versions.MATCH_ANY))
                .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(1))
                .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(2))
                .get();
        assertThat(response.getResponses().length, equalTo(3));
        // [0] version doesn't matter, which is the default
        assertThat(response.getResponses()[0].getFailure(), nullValue());
        assertThat(response.getResponses()[0].getId(), equalTo("2"));
        assertThat(response.getResponses()[0].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[0].getResponse().getSourceAsMap().get("field").toString(), equalTo("value2"));
        // [1] now conflicts: the doc is at version 2, request asked for 1.
        assertThat(response.getResponses()[1].getFailure(), notNullValue());
        assertThat(response.getResponses()[1].getFailure().getId(), equalTo("2"));
        assertThat(response.getResponses()[1].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[1].getFailure().getMessage(), startsWith("VersionConflictEngineException"));
        assertThat(response.getResponses()[2].getId(), equalTo("2"));
        assertThat(response.getResponses()[2].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[2].getFailure(), nullValue());
        assertThat(response.getResponses()[2].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[2].getResponse().getSourceAsMap().get("field").toString(), equalTo("value2"));

        //Version from Lucene index
        refresh();
        response = client().prepareMultiGet()
                .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(Versions.MATCH_ANY))
                .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(1))
                .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(2))
                .setRealtime(false)
                .get();
        assertThat(response.getResponses().length, equalTo(3));
        // [0] version doesn't matter, which is the default
        assertThat(response.getResponses()[0].getFailure(), nullValue());
        assertThat(response.getResponses()[0].getId(), equalTo("2"));
        assertThat(response.getResponses()[0].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[0].getResponse().getSourceAsMap().get("field").toString(), equalTo("value2"));
        assertThat(response.getResponses()[1].getFailure(), notNullValue());
        assertThat(response.getResponses()[1].getFailure().getId(), equalTo("2"));
        assertThat(response.getResponses()[1].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[1].getFailure().getMessage(), startsWith("VersionConflictEngineException"));
assertThat(response.getResponses()[2].getId(), equalTo("2")); assertThat(response.getResponses()[2].getIndex(), equalTo("test")); assertThat(response.getResponses()[2].getFailure(), nullValue()); assertThat(response.getResponses()[2].getResponse().isExists(), equalTo(true)); assertThat(response.getResponses()[2].getResponse().getSourceAsMap().get("field").toString(), equalTo("value2")); } @Test public void testGetFields_metaData() throws Exception { assertAcked(prepareCreate("test").addAlias(new Alias("alias")) .setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1))); client().prepareIndex("test", "my-type1", "1") .setRouting("1") .setSource(jsonBuilder().startObject().field("field1", "value").endObject()) .get(); GetResponse getResponse = client().prepareGet(indexOrAlias(), "my-type1", "1") .setRouting("1") .setFields("field1", "_routing") .get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getField("field1").isMetadataField(), equalTo(false)); assertThat(getResponse.getField("field1").getValue().toString(), equalTo("value")); assertThat(getResponse.getField("_routing").isMetadataField(), equalTo(true)); assertThat(getResponse.getField("_routing").getValue().toString(), equalTo("1")); flush(); client().prepareGet(indexOrAlias(), "my-type1", "1") .setFields("field1", "_routing") .setRouting("1") .get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getField("field1").isMetadataField(), equalTo(false)); assertThat(getResponse.getField("field1").getValue().toString(), equalTo("value")); assertThat(getResponse.getField("_routing").isMetadataField(), equalTo(true)); assertThat(getResponse.getField("_routing").getValue().toString(), equalTo("1")); } @Test public void testGetFields_nonLeafField() throws Exception { assertAcked(prepareCreate("test").addAlias(new Alias("alias")) .addMapping("my-type1", jsonBuilder().startObject().startObject("my-type1").startObject("properties") 
.startObject("field1").startObject("properties") .startObject("field2").field("type", "string").endObject() .endObject().endObject() .endObject().endObject().endObject()) .setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1))); client().prepareIndex("test", "my-type1", "1") .setSource(jsonBuilder().startObject().startObject("field1").field("field2", "value1").endObject().endObject()) .get(); try { client().prepareGet(indexOrAlias(), "my-type1", "1").setFields("field1").get(); fail(); } catch (IllegalArgumentException e) { //all well } flush(); try { client().prepareGet(indexOrAlias(), "my-type1", "1").setFields("field1").get(); fail(); } catch (IllegalArgumentException e) { //all well } } @Test @TestLogging("index.shard.service:TRACE,cluster.service:TRACE,action.admin.indices.flush:TRACE") public void testGetFields_complexField() throws Exception { assertAcked(prepareCreate("my-index") .setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1)) .addMapping("my-type2", jsonBuilder().startObject().startObject("my-type2").startObject("properties") .startObject("field1").field("type", "object").startObject("properties") .startObject("field2").field("type", "object").startObject("properties") .startObject("field3").field("type", "object").startObject("properties") .startObject("field4").field("type", "string").field("store", "yes") .endObject().endObject() .endObject().endObject() .endObject().endObject() .endObject().endObject().endObject())); BytesReference source = jsonBuilder().startObject() .startArray("field1") .startObject() .startObject("field2") .startArray("field3") .startObject() .field("field4", "value1") .endObject() .endArray() .endObject() .endObject() .startObject() .startObject("field2") .startArray("field3") .startObject() .field("field4", "value2") .endObject() .endArray() .endObject() .endObject() .endArray() .endObject().bytes(); logger.info("indexing documents"); client().prepareIndex("my-index", "my-type1", 
"1").setSource(source).get(); client().prepareIndex("my-index", "my-type2", "1").setSource(source).get(); logger.info("checking real time retrieval"); String field = "field1.field2.field3.field4"; GetResponse getResponse = client().prepareGet("my-index", "my-type1", "1").setFields(field).get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getField(field).isMetadataField(), equalTo(false)); assertThat(getResponse.getField(field).getValues().size(), equalTo(2)); assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1")); assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2")); getResponse = client().prepareGet("my-index", "my-type2", "1").setFields(field).get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getField(field).isMetadataField(), equalTo(false)); assertThat(getResponse.getField(field).getValues().size(), equalTo(2)); assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1")); assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2")); logger.info("waiting for recoveries to complete"); // Flush fails if shard has ongoing recoveries, make sure the cluster is settled down ensureGreen(); logger.info("flushing"); FlushResponse flushResponse = client().admin().indices().prepareFlush("my-index").setForce(true).get(); if (flushResponse.getSuccessfulShards() == 0) { StringBuilder sb = new StringBuilder("failed to flush at least one shard. 
total shards [") .append(flushResponse.getTotalShards()).append("], failed shards: [").append(flushResponse.getFailedShards()).append("]"); for (ShardOperationFailedException failure: flushResponse.getShardFailures()) { sb.append("\nShard failure: ").append(failure); } fail(sb.toString()); } logger.info("checking post-flush retrieval"); getResponse = client().prepareGet("my-index", "my-type1", "1").setFields(field).get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getField(field).isMetadataField(), equalTo(false)); assertThat(getResponse.getField(field).getValues().size(), equalTo(2)); assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1")); assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2")); getResponse = client().prepareGet("my-index", "my-type2", "1").setFields(field).get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getField(field).isMetadataField(), equalTo(false)); assertThat(getResponse.getField(field).getValues().size(), equalTo(2)); assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1")); assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2")); } @Test public void testGet_allField() throws Exception { assertAcked(prepareCreate("test") .addAlias(new Alias("alias")) .addMapping("my-type1", jsonBuilder() .startObject() .startObject("my-type1") .startObject("_all") .field("store", true) .endObject() .startObject("properties") .startObject("some_field") .field("type", "string") .endObject() .endObject() .endObject() .endObject())); index("test", "my-type1", "1", "some_field", "some text"); refresh(); GetResponse getResponse = client().prepareGet(indexOrAlias(), "my-type1", "1").setFields("_all").get(); assertNotNull(getResponse.getField("_all").getValue()); assertThat(getResponse.getField("_all").getValue().toString(), equalTo("some text" + " ")); } @Test public 
void testUngeneratedFieldsThatAreNeverStored() throws IOException { String createIndexSource = "{\n" + " \"settings\": {\n" + " \"index.translog.disable_flush\": true,\n" + " \"refresh_interval\": \"-1\"\n" + " },\n" + " \"mappings\": {\n" + " \"doc\": {\n" + " \"properties\": {\n" + " \"suggest\": {\n" + " \"type\": \"completion\"\n" + " }\n" + " }\n" + " }\n" + " }\n" + "}"; assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource)); ensureGreen(); String doc = "{\n" + " \"suggest\": {\n" + " \"input\": [\n" + " \"Nevermind\",\n" + " \"Nirvana\"\n" + " ],\n" + " \"output\": \"Nirvana - Nevermind\"\n" + " }\n" + "}"; index("test", "doc", "1", doc); String[] fieldsList = {"suggest"}; // before refresh - document is only in translog assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList); refresh(); //after refresh - document is in translog and also indexed assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList); flush(); //after flush - document is in not anymore translog - only indexed assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList); } @Test public void testUngeneratedFieldsThatAreAlwaysStored() throws IOException { String createIndexSource = "{\n" + " \"settings\": {\n" + " \"index.translog.disable_flush\": true,\n" + " \"refresh_interval\": \"-1\"\n" + " },\n" + " \"mappings\": {\n" + " \"parentdoc\": {},\n" + " \"doc\": {\n" + " \"_parent\": {\n" + " \"type\": \"parentdoc\"\n" + " },\n" + " \"_ttl\": {\n" + " \"enabled\": true\n" + " }\n" + " }\n" + " }\n" + "}"; assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource)); ensureGreen(); client().prepareIndex("test", "doc").setId("1").setSource("{}").setParent("1").setTTL(TimeValue.timeValueHours(1).getMillis()).get(); String[] fieldsList = {"_ttl", "_parent"}; // before refresh - document is only in translog assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList, "1"); refresh(); //after refresh 
- document is in translog and also indexed assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList, "1"); flush(); //after flush - document is in not anymore translog - only indexed assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList, "1"); } @Test public void testUngeneratedFieldsPartOfSourceUnstoredSourceDisabledBackcompat() throws IOException { indexSingleDocumentWithUngeneratedFieldsThatArePartOf_source(false, false); String[] fieldsList = {}; // before refresh - document is only in translog assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList); refresh(); //after refresh - document is in translog and also indexed assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList); flush(); //after flush - document is in not anymore translog - only indexed assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList); } @Test public void testUngeneratedFieldsPartOfSourceEitherStoredOrSourceEnabledBackcompat() throws IOException { boolean stored = randomBoolean(); boolean sourceEnabled = true; if (stored) { sourceEnabled = randomBoolean(); } indexSingleDocumentWithUngeneratedFieldsThatArePartOf_source(stored, sourceEnabled); String[] fieldsList = {}; // before refresh - document is only in translog assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList); refresh(); //after refresh - document is in translog and also indexed assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList); flush(); //after flush - document is in not anymore translog - only indexed assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList); } void indexSingleDocumentWithUngeneratedFieldsThatArePartOf_source(boolean stored, boolean sourceEnabled) { String storedString = stored ? 
"yes" : "no"; String createIndexSource = "{\n" + " \"settings\": {\n" + " \"index.translog.disable_flush\": true,\n" + " \"refresh_interval\": \"-1\",\n" + " \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" + " },\n" + " \"mappings\": {\n" + " \"doc\": {\n" + " \"_source\": {\n" + " \"enabled\": " + sourceEnabled + "\n" + " }\n" + " }\n" + " }\n" + "}"; assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource)); ensureGreen(); String doc = "{\n" + " \"my_boost\": 5.0,\n" + " \"_ttl\": \"1h\"\n" + "}\n"; client().prepareIndex("test", "doc").setId("1").setSource(doc).setRouting("1").get(); } @Test public void testUngeneratedFieldsNotPartOfSourceUnstored() throws IOException { indexSingleDocumentWithUngeneratedFieldsThatAreNeverPartOf_source(false, randomBoolean()); String[] fieldsList = {"_timestamp"}; String[] alwaysStoredFieldsList = {"_routing", "_size"}; // before refresh - document is only in translog assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList, "1"); assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", alwaysStoredFieldsList, "1"); refresh(); //after refresh - document is in translog and also indexed assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList, "1"); assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", alwaysStoredFieldsList, "1"); flush(); //after flush - document is in not anymore translog - only indexed assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList, "1"); assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", alwaysStoredFieldsList, "1"); } @Test public void testUngeneratedFieldsNotPartOfSourceStored() throws IOException { indexSingleDocumentWithUngeneratedFieldsThatAreNeverPartOf_source(true, randomBoolean()); String[] fieldsList = {"_timestamp", "_size", "_routing"}; // before refresh - document is only in translog assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList, "1"); refresh(); //after refresh - document is 
in translog and also indexed assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList, "1"); flush(); //after flush - document is in not anymore translog - only indexed assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList, "1"); } void indexSingleDocumentWithUngeneratedFieldsThatAreNeverPartOf_source(boolean stored, boolean sourceEnabled) { String storedString = stored ? "yes" : "no"; String createIndexSource = "{\n" + " \"settings\": {\n" + " \"index.translog.disable_flush\": true,\n" + " \"refresh_interval\": \"-1\"\n" + " },\n" + " \"mappings\": {\n" + " \"parentdoc\": {},\n" + " \"doc\": {\n" + " \"_timestamp\": {\n" + " \"store\": \"" + storedString + "\",\n" + " \"enabled\": true\n" + " },\n" + " \"_size\": {\n" + " \"enabled\": true\n" + " }\n" + " }\n" + " }\n" + "}"; assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource)); ensureGreen(); String doc = "{\n" + " \"text\": \"some text.\"\n" + "}\n"; client().prepareIndex("test", "doc").setId("1").setSource(doc).setRouting("1").get(); } @Test public void testGeneratedStringFieldsUnstored() throws IOException { indexSingleDocumentWithStringFieldsGeneratedFromText(false, randomBoolean()); String[] fieldsList = {"_all", "_field_names"}; // before refresh - document is only in translog assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList); refresh(); //after refresh - document is in translog and also indexed assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList); flush(); //after flush - document is in not anymore translog - only indexed assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList); } @Test public void testGeneratedStringFieldsStored() throws IOException { indexSingleDocumentWithStringFieldsGeneratedFromText(true, randomBoolean()); String[] fieldsList = {"_all"}; String[] alwaysNotStoredFieldsList = {"_field_names"}; // before refresh - document is only in translog assertGetFieldsNull(indexOrAlias(), "doc", "1", 
fieldsList);
assertGetFieldsException(indexOrAlias(), "doc", "1", fieldsList);
assertGetFieldsNull(indexOrAlias(), "doc", "1", alwaysNotStoredFieldsList);
refresh();
//after refresh - document is in translog and also indexed
assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList);
assertGetFieldsNull(indexOrAlias(), "doc", "1", alwaysNotStoredFieldsList);
flush();
//after flush - document is in not anymore translog - only indexed
assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList);
assertGetFieldsNull(indexOrAlias(), "doc", "1", alwaysNotStoredFieldsList);
}

/**
 * Indexes one doc on a back-compat (1.4.2) index whose {@code _all} field is stored or
 * not per {@code stored}, with {@code _source} enabled per {@code sourceEnabled}.
 */
void indexSingleDocumentWithStringFieldsGeneratedFromText(boolean stored, boolean sourceEnabled) {
    String storedString = stored ? "yes" : "no";
    String createIndexSource = "{\n" +
            " \"settings\": {\n" +
            " \"index.translog.disable_flush\": true,\n" +
            " \"refresh_interval\": \"-1\",\n" +
            " \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" +
            " },\n" +
            " \"mappings\": {\n" +
            " \"doc\": {\n" +
            " \"_source\" : {\"enabled\" : " + sourceEnabled + "}," +
            " \"_all\" : {\"enabled\" : true, \"store\":\"" + storedString + "\" }" +
            " }\n" +
            " }\n" +
            "}";
    assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource));
    ensureGreen();
    String doc = "{\n" +
            " \"text1\": \"some text.\"\n," +
            " \"text2\": \"more text.\"\n" +
            "}\n";
    index("test", "doc", "1", doc);
}

/**
 * Unstored generated numeric fields (token_count / murmur3, top-level and multi-field)
 * are never retrievable, regardless of translog/index state.
 */
@Test
public void testGeneratedNumberFieldsUnstored() throws IOException {
    indexSingleDocumentWithNumericFieldsGeneratedFromText(false, randomBoolean());
    String[] fieldsList = {"token_count", "text.token_count", "murmur", "text.murmur"};
    // before refresh - document is only in translog
    assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList);
    refresh();
    //after refresh - document is in translog and also indexed
    assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList);
    flush();
    //after flush - document is in not anymore translog - only indexed
    assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList);
}

/**
 * Stored generated numeric fields: before refresh they are null when errors are ignored
 * and raise otherwise; after refresh/flush they are always retrievable.
 */
@Test
public void testGeneratedNumberFieldsStored() throws IOException {
    indexSingleDocumentWithNumericFieldsGeneratedFromText(true, randomBoolean());
    String[] fieldsList = {"token_count", "text.token_count", "murmur", "text.murmur"};
    // before refresh - document is only in translog
    assertGetFieldsNull(indexOrAlias(), "doc", "1", fieldsList);
    assertGetFieldsException(indexOrAlias(), "doc", "1", fieldsList);
    refresh();
    //after refresh - document is in translog and also indexed
    assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList);
    flush();
    //after flush - document is in not anymore translog - only indexed
    assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList);
}

/**
 * Indexes one doc on a back-compat (1.4.2) index mapping token_count and murmur3 fields,
 * both top-level and as multi-fields of {@code text}, stored per {@code stored}.
 */
void indexSingleDocumentWithNumericFieldsGeneratedFromText(boolean stored, boolean sourceEnabled) {
    String storedString = stored ? "yes" : "no";
    String createIndexSource = "{\n" +
            " \"settings\": {\n" +
            " \"index.translog.disable_flush\": true,\n" +
            " \"refresh_interval\": \"-1\",\n" +
            " \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" +
            " },\n" +
            " \"mappings\": {\n" +
            " \"doc\": {\n" +
            " \"_source\" : {\"enabled\" : " + sourceEnabled + "}," +
            " \"properties\": {\n" +
            " \"token_count\": {\n" +
            " \"type\": \"token_count\",\n" +
            " \"analyzer\": \"standard\",\n" +
            " \"store\": \"" + storedString + "\"" +
            " },\n" +
            " \"murmur\": {\n" +
            " \"type\": \"murmur3\",\n" +
            " \"store\": \"" + storedString + "\"" +
            " },\n" +
            " \"text\": {\n" +
            " \"type\": \"string\",\n" +
            " \"fields\": {\n" +
            " \"token_count\": {\n" +
            " \"type\": \"token_count\",\n" +
            " \"analyzer\": \"standard\",\n" +
            " \"store\": \"" + storedString + "\"" +
            " },\n" +
            " \"murmur\": {\n" +
            " \"type\": \"murmur3\",\n" +
            " \"store\": \"" + storedString + "\"" +
            " }\n" +
            " }\n" +
            " }" +
            " }\n" +
            " }\n" +
            " }\n" +
            "}";
    assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource));
    ensureGreen();
    String doc = "{\n" +
            " \"murmur\": \"Some value that can be hashed\",\n" +
            " \"token_count\": \"A text with five words.\",\n" +
            " \"text\": \"A text with five words.\"\n" +
            "}\n";
    index("test", "doc", "1", doc);
}

/** Asserts every field is retrievable via both get and multi-get, without routing. */
private void assertGetFieldsAlwaysWorks(String index, String type, String docId, String[] fields) {
    assertGetFieldsAlwaysWorks(index, type, docId, fields, null);
}

/** Asserts every field is retrievable whether or not generated-field errors are ignored. */
private void assertGetFieldsAlwaysWorks(String index, String type, String docId, String[] fields, @Nullable String routing) {
    for (String field : fields) {
        assertGetFieldWorks(index, type, docId, field, false, routing);
        assertGetFieldWorks(index, type, docId, field, true, routing);
    }
}

/** Asserts one field comes back non-null from both the get and multi-get APIs. */
private void assertGetFieldWorks(String index, String type, String docId, String field, boolean ignoreErrors, @Nullable String routing) {
    GetResponse response = getDocument(index, type, docId, field, ignoreErrors, routing);
    assertThat(response.getId(), equalTo(docId));
    assertTrue(response.isExists());
    assertNotNull(response.getField(field));
    response = multiGetDocument(index, type, docId, field, ignoreErrors, routing);
    assertThat(response.getId(), equalTo(docId));
    assertTrue(response.isExists());
    assertNotNull(response.getField(field));
}

/** Asserts each field raises when generated-field errors are NOT ignored. */
protected void assertGetFieldsException(String index, String type, String docId, String[] fields) {
    for (String field : fields) {
        assertGetFieldException(index, type, docId, field);
    }
}

/**
 * With {@code ignoreErrorsOnGeneratedFields(false)}: get must throw, and multi-get must
 * report a per-item failure with the same message, for a generated field that is not yet
 * searchable.
 */
private void assertGetFieldException(String index, String type, String docId, String field) {
    try {
        client().prepareGet().setIndex(index).setType(type).setId(docId).setFields(field).setIgnoreErrorsOnGeneratedFields(false).get();
        fail();
    } catch (ElasticsearchException e) {
        assertTrue(e.getMessage().contains("You can only get this field after refresh() has been called."));
    }
    MultiGetResponse multiGetResponse = client().prepareMultiGet().add(new MultiGetRequest.Item(index, type, docId).fields(field)).setIgnoreErrorsOnGeneratedFields(false).get();
    assertNull(multiGetResponse.getResponses()[0].getResponse());
    assertTrue(multiGetResponse.getResponses()[0].getFailure().getMessage().contains("You can only get this field after refresh() has been called."));
}

/** Asserts fields are null (errors ignored), without routing. */
protected void assertGetFieldsNull(String index, String type, String docId, String[] fields) {
    assertGetFieldsNull(index, type, docId, fields, null);
}

/** Asserts each field is null when generated-field errors are ignored. */
protected void assertGetFieldsNull(String index, String type, String docId, String[] fields, @Nullable String routing) {
    for (String field : fields) {
        assertGetFieldNull(index, type, docId, field, true, routing);
    }
}

/** Asserts fields are null in both ignore modes, without routing. */
protected void assertGetFieldsAlwaysNull(String index, String type, String docId, String[] fields) {
    assertGetFieldsAlwaysNull(index, type, docId, fields, null);
}

/** Asserts each field is null whether or not generated-field errors are ignored. */
protected void assertGetFieldsAlwaysNull(String index, String type, String docId, String[] fields, @Nullable String routing) {
    for (String field : fields) {
        assertGetFieldNull(index, type, docId, field, true, routing);
        assertGetFieldNull(index, type, docId, field, false, routing);
    }
}

/** Asserts the document exists but the requested field is absent, via get and multi-get. */
protected void assertGetFieldNull(String index, String type, String docId, String field, boolean ignoreErrors, @Nullable String routing) {
    //for get
    GetResponse response = getDocument(index, type, docId, field, ignoreErrors, routing);
    assertTrue(response.isExists());
    assertNull(response.getField(field));
    assertThat(response.getId(), equalTo(docId));
    //same for multi get
    response = multiGetDocument(index, type, docId, field, ignoreErrors, routing);
    assertNull(response.getField(field));
    assertThat(response.getId(), equalTo(docId));
    assertTrue(response.isExists());
}

/** Fetches one doc through the multi-get API and unwraps the single item response. */
private GetResponse multiGetDocument(String index, String type, String docId, String field, boolean ignoreErrors, @Nullable String routing) {
    MultiGetRequest.Item getItem = new MultiGetRequest.Item(index, type, docId).fields(field);
    if (routing != null) {
        getItem.routing(routing);
    }
    MultiGetRequestBuilder multiGetRequestBuilder = client().prepareMultiGet().add(getItem).setIgnoreErrorsOnGeneratedFields(ignoreErrors);
    MultiGetResponse multiGetResponse = multiGetRequestBuilder.get();
    assertThat(multiGetResponse.getResponses().length, equalTo(1));
    return multiGetResponse.getResponses()[0].getResponse();
}

/** Fetches one doc through the single-get API with optional routing. */
private GetResponse getDocument(String index, String type, String docId, String field, boolean ignoreErrors, @Nullable String routing) {
    GetRequestBuilder getRequestBuilder = client().prepareGet().setIndex(index).setType(type).setId(docId).setFields(field).setIgnoreErrorsOnGeneratedFields(ignoreErrors);
    if (routing != null) {
        getRequestBuilder.setRouting(routing);
    }
    return getRequestBuilder.get();
}
}
package org.grobid.trainer;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.LinkedList;
import java.util.List;
import java.util.StringTokenizer;

import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;

import org.grobid.core.GrobidModels;
import org.grobid.core.exceptions.GrobidException;
import org.grobid.core.features.FeaturesVectorReference;
import org.grobid.core.sax.MarecSaxParser;
import org.grobid.core.utilities.GrobidProperties;
import org.grobid.core.utilities.OffsetPosition;
import org.grobid.core.utilities.TextUtilities;
import org.grobid.trainer.evaluation.PatentEvaluation;

/**
 * Trainer for the patent citation models: builds training data from the annotated
 * MAREC patent corpus and trains the corresponding CRF model(s).
 *
 * @author Patrice Lopez
 */
public class PatentParserTrainer extends AbstractTrainer {

    // the window value indicate the right and left context of text to consider for an annotation when building
    // the training or the test data - the value is experimentally set
    // this window is used to maintain a certain level of occurence of the patent and NPL references, and avoid
    // to have the citation annotation diluted because they are very rare (less than 1 token per 1000)
    private static final int trainWindow = 200;

    public PatentParserTrainer() {
        super(GrobidModels.PATENT_PATENT);
        // adjusting CRF training parameters for this model (only with Wapiti)
        // (epsilon and window are inherited from AbstractTrainer)
        epsilon = 0.0001;
        window = 20;
    }

    /**
     * Generates the training data files under {@code trainingDataDir} from the
     * patent corpus bundled in the resources.
     *
     * NOTE(review): the {@code nb} counter is never incremented, so this method
     * always returns 0 — confirm whether a document count was intended.
     *
     * @param trainingDataDir output directory for the generated training files
     * @return always 0 (see note above)
     * @throws GrobidException wrapping any failure during data generation
     */
    public int createTrainingData(String trainingDataDir) {
        int nb = 0;
        try {
            String path = new File(new File(getFilePath2Resources(), "dataset/patent/corpus/").getAbsolutePath()).getAbsolutePath();
            createDataSet(null, null, path, trainingDataDir, 0);
        } catch (Exception e) {
            throw new GrobidException("An exception occurred while training Grobid.", e);
        }
        return nb;
    }

    // we have our own train() method that trains several models at once,
    // therefore we don't need these methods which typically
    // were executed from AbstractTrainer.
    // No-op: training data is produced by createTrainingData()/createDataSet() instead.
    @Override
    public int createCRFPPData(File corpusPath, File outputFile) {
        return 0;
    }

    // No-op: this trainer does not use the generic train/eval split mechanism.
    @Override
    public int createCRFPPData(File corpusPath, File outputTrainingFile, File outputEvalFile, double splitRatio) {
        return 0;
    }

    /**
     * Generates the training data in the temp directory, then trains the combined
     * (PATENT_ALL) citation model via the configured external trainer and installs
     * the resulting model file.
     */
    public void train() {
        createTrainingData(GrobidProperties.getTempPath().getAbsolutePath());
        // String path = new File(new File("resources/dataset/patent/crfpp-templates/").getAbsolutePath()).getAbsolutePath();
        // train the resulting training files with features (based external command line, no JNI
        // binding for the training functions of CRF++)
        //File trainingDataPath1 = new File(GrobidProperties.getTempPath() + "/npl.train");
        //File trainingDataPath2 = new File(GrobidProperties.getTempPath() + "/patent.train");
        File trainingDataPath3 = new File(GrobidProperties.getTempPath() + "/all.train");
        // File templatePath1 = new File(getFilePath2Resources(), "dataset/patent/crfpp-templates/text.npl.references.template");
        //File templatePath2 = new File(getFilePath2Resources(), "dataset/patent/crfpp-templates/text.patent.references.template");
        File templatePath3 = new File(getFilePath2Resources(), "dataset/patent/crfpp-templates/text.references.template");
        GenericTrainer trainer = TrainerFactory.getTrainer();
        trainer.setEpsilon(epsilon);
        trainer.setWindow(window);
        //File modelPath1 = new File(GrobidProperties.getModelPath(GrobidModels.PATENT_NPL).getAbsolutePath() + NEW_MODEL_EXT);
        //File modelPath2 = new File(GrobidProperties.getModelPath(GrobidModels.PATENT_PATENT).getAbsolutePath() + NEW_MODEL_EXT);
        File modelPath3 = new File(GrobidProperties.getModelPath(GrobidModels.PATENT_ALL).getAbsolutePath() + NEW_MODEL_EXT);
        //trainer.train(templatePath1, trainingDataPath1, modelPath1, GrobidProperties.getNBThreads());
        //trainer.train(templatePath2, trainingDataPath2, modelPath2, GrobidProperties.getNBThreads());
        trainer.train(templatePath3, trainingDataPath3, modelPath3, GrobidProperties.getNBThreads(), model);
        //renaming
        //renameModels(GrobidProperties.getModelPath(GrobidModels.PATENT_NPL), modelPath1);
        //renameModels(GrobidProperties.getModelPath(GrobidModels.PATENT_PATENT), modelPath2);
        renameModels(GrobidProperties.getModelPath(GrobidModels.PATENT_ALL), modelPath3);
    }

    /**
     * Create the set of training and evaluation sets from the annotated examples with
     * extraction of citations in the patent description body.
     *
     * @param setName    name of the set ("train"/"test"), or null for the default training set
     * @param rank       rank associated to the set for n-fold data generation (null when not n-fold)
     * @param corpusPath root directory of the annotated corpus
     * @param outputPath directory where the generated data files are written
     * @param type       type of data to be created, 0 is training data, 1 is evaluation data
     */
    public void createDataSet(String setName, String rank, String corpusPath, String outputPath, int type) {
        int nbFiles = 0;
        int nbNPLRef = 0;
        int nbPatentRef = 0;
        int maxRef = 0;
        try {
            // PATENT REF. textual data
            // we use a SAX parser on the patent XML files
            MarecSaxParser sax = new MarecSaxParser();
            sax.patentReferences = true;
            sax.nplReferences = false;
            int srCitations = 0;
            int previousSrCitations = 0;
            int withSR = 0;
            List<OffsetPosition> journalsPositions = null;
            List<OffsetPosition> abbrevJournalsPositions = null;
            List<OffsetPosition> conferencesPositions = null;
            List<OffsetPosition> publishersPositions = null;
            if (type == 0) {
                // training set
                sax.setN(trainWindow);
            } else {
                // for the test set we enlarge the focus window to include all the document.
sax.setN(-1); } // get a factory /*SAXParserFactory spf = SAXParserFactory.newInstance(); spf.setValidating(false); spf.setFeature("http://xml.org/sax/features/namespaces", false); spf.setFeature("http://xml.org/sax/features/validation", false); LinkedList<File> fileList = new LinkedList<File>(); if (setName == null) { fileList.add(new File(corpusPath)); } else if (rank == null) { fileList.add(new File(corpusPath)); } else { // n-fold evaluation fileList.add(new File(corpusPath + File.separator + setName + "ing" + rank + File.separator)); } Writer writer = null; if ((setName == null) || (setName.length() == 0)) { writer = new OutputStreamWriter(new FileOutputStream( new File(outputPath + "/patent.train"), false), "UTF-8"); } else if (rank == null) { writer = new OutputStreamWriter(new FileOutputStream( new File(outputPath + "/patent." + setName), false), "UTF-8"); } else { writer = new OutputStreamWriter(new FileOutputStream( new File(outputPath + setName + "ing" + rank + "/patent." + setName), false), "UTF-8"); } while (fileList.size() > 0) { File file = fileList.removeFirst(); if (file.isDirectory()) { for (File subFile : file.listFiles()) fileList.addLast(subFile); } else { if (file.getName().endsWith(".xml")) { nbFiles++; System.out.println(file.getAbsolutePath()); try { //get a new instance of parser SAXParser p = spf.newSAXParser(); FileInputStream in = new FileInputStream(file); sax.setFileName(file.getName()); p.parse(in, sax); //writer1.write("\n"); nbPatentRef += sax.getNbPatentRef(); if (sax.citations != null) { if (sax.citations.size() > previousSrCitations) { previousSrCitations = sax.citations.size(); withSR++; } } journalsPositions = sax.journalsPositions; abbrevJournalsPositions = sax.abbrevJournalsPositions; conferencesPositions = sax.conferencesPositions; publishersPositions = sax.publishersPositions; if (sax.accumulatedText != null) { String text = sax.accumulatedText.toString(); if (text.trim().length() > 0) { // add features for the patent 
tokens addFeatures(text, writer, journalsPositions, abbrevJournalsPositions, conferencesPositions, publishersPositions); writer.write("\n \n"); } } } catch (Exception e) { throw new GrobidException("An exception occured while running Grobid.", e); } } } }*/ // NPL REF. textual data /*sax = new MarecSaxParser(); sax.patentReferences = false; sax.nplReferences = true; if (type == 0) { // training set sax.setN(trainWindow); } else { // for the test set we enlarge the focus window to include all the document. sax.setN(-1); } // get a factory spf = SAXParserFactory.newInstance(); spf.setValidating(false); spf.setFeature("http://xml.org/sax/features/namespaces", false); spf.setFeature("http://xml.org/sax/features/validation", false); fileList = new LinkedList<File>(); if (setName == null) { fileList.add(new File(corpusPath)); } else if (rank == null) { fileList.add(new File(corpusPath)); } else { fileList.add(new File(corpusPath + File.separator + setName + "ing" + rank + File.separator)); } if ((setName == null) || (setName.length() == 0)) { writer = new OutputStreamWriter(new FileOutputStream( new File(outputPath + "/npl.train"), false), "UTF-8"); } else if (rank == null) { writer = new OutputStreamWriter(new FileOutputStream( new File(outputPath + "/npl." + setName), false), "UTF-8"); } else { writer = new OutputStreamWriter(new FileOutputStream( new File(outputPath + File.separator + setName + "ing" + rank + File.separator + "npl." 
+ setName), false), "UTF-8"); } while (fileList.size() > 0) { File file = fileList.removeFirst(); if (file.isDirectory()) { for (File subFile : file.listFiles()) fileList.addLast(subFile); } else { if (file.getName().endsWith(".xml")) { //nbFiles++; //String text = Files.readFromFile(file,"UTF-8"); try { //get a new instance of parser SAXParser p = spf.newSAXParser(); FileInputStream in = new FileInputStream(file); sax.setFileName(file.toString()); p.parse(in, sax); //writer2.write("\n"); nbNPLRef += sax.getNbNPLRef(); if (sax.nbAllRef > maxRef) { maxRef = sax.nbAllRef; } if (sax.citations != null) { if (sax.citations.size() > previousSrCitations) { previousSrCitations = sax.citations.size(); withSR++; } } journalsPositions = sax.journalsPositions; abbrevJournalsPositions = sax.abbrevJournalsPositions; conferencesPositions = sax.conferencesPositions; publishersPositions = sax.publishersPositions; //totalLength += sax.totalLength; if (sax.accumulatedText != null) { String text = sax.accumulatedText.toString(); // add features for NPL addFeatures(text, writer, journalsPositions, abbrevJournalsPositions, conferencesPositions, publishersPositions); writer.write("\n"); } } catch (Exception e) { throw new GrobidException("An exception occured while running Grobid.", e); } } } } if (sax.citations != null) srCitations += sax.citations.size();*/ // Patent + NPL REF. textual data (the "all" model) sax = new MarecSaxParser(); sax.patentReferences = true; sax.nplReferences = true; if (type == 0) { // training set sax.setN(trainWindow); } else { // for the test set we enlarge the focus window to include all the document. 
sax.setN(-1); } // get a factory SAXParserFactory spf = SAXParserFactory.newInstance(); spf.setValidating(false); spf.setFeature("http://xml.org/sax/features/namespaces", false); spf.setFeature("http://xml.org/sax/features/validation", false); LinkedList<File> fileList = new LinkedList<File>(); if (setName == null) { fileList.add(new File(corpusPath)); } else if (rank == null) { fileList.add(new File(corpusPath)); } else { fileList.add(new File(corpusPath + File.separator + setName + "ing" + rank + File.separator)); } Writer writer = null; if ((setName == null) || (setName.length() == 0)) { writer = new OutputStreamWriter(new FileOutputStream( new File(outputPath + File.separator + "all.train"), false), "UTF-8"); } else if (rank == null) { writer = new OutputStreamWriter(new FileOutputStream( new File(outputPath + File.separator + "all." + setName), false), "UTF-8"); } else { writer = new OutputStreamWriter(new FileOutputStream( new File(outputPath + File.separator + setName + "ing" + rank + File.separator + "all." 
+ setName), false), "UTF-8"); } //int totalLength = 0; while (fileList.size() > 0) { File file = fileList.removeFirst(); if (file.isDirectory()) { for (File subFile : file.listFiles()) { fileList.addLast(subFile); } } else { if (file.getName().endsWith(".xml")) { nbFiles++; try { //get a new instance of parser SAXParser p = spf.newSAXParser(); FileInputStream in = new FileInputStream(file); sax.setFileName(file.toString()); p.parse(in, sax); //writer3.write("\n"); nbNPLRef += sax.getNbNPLRef(); nbPatentRef += sax.getNbPatentRef(); if (sax.nbAllRef > maxRef) { maxRef = sax.nbAllRef; } if (sax.citations != null) { if (sax.citations.size() > previousSrCitations) { previousSrCitations = sax.citations.size(); withSR++; } } journalsPositions = sax.journalsPositions; abbrevJournalsPositions = sax.abbrevJournalsPositions; conferencesPositions = sax.conferencesPositions; publishersPositions = sax.publishersPositions; //totalLength += sax.totalLength; if (sax.accumulatedText != null) { String text = sax.accumulatedText.toString(); // add features for patent+NPL addFeatures(text, writer, journalsPositions, abbrevJournalsPositions, conferencesPositions, publishersPositions); writer.write("\n"); } } catch (Exception e) { throw new GrobidException("An exception occured while running Grobid.", e); } } } } if (sax.citations != null) { srCitations += sax.citations.size(); } if (setName != null) { System.out.println(setName + "ing on " + nbFiles + " files"); } else { System.out.println("training on " + nbFiles + " files"); } //System.out.println("Number of file with search report: " + withSR); System.out.println("Number of references: " + (nbNPLRef + nbPatentRef)); System.out.println("Number of patent references: " + nbPatentRef); System.out.println("Number of NPL references: " + nbNPLRef); //System.out.println("Number of search report citations: " + srCitations); System.out.println("Average number of references: " + TextUtilities.formatTwoDecimals((double) (nbNPLRef + nbPatentRef) 
/ nbFiles)); System.out.println("Max number of references in file: " + maxRef); /*if ((setName == null) || (setName.length() == 0)) { System.out.println("patent data set under: " + outputPath + "/patent.train"); } else { System.out.println("patent data set under: " + outputPath + "/patent." + setName); } if ((setName == null) || (setName.length() == 0)) { System.out.println("npl data set under: " + outputPath + "/npl.train"); } else { System.out.println("npl data set under: " + outputPath + "/npl." + setName); }*/ if ((setName == null) || (setName.length() == 0)) { System.out.println("common data set under: " + outputPath + "/all.train"); } else { System.out.println("common data set under: " + outputPath + "/all." + setName); } } catch (Exception e) { throw new GrobidException("An exception occurred while running Grobid.", e); } } public void addFeatures(String text, Writer writer, List<OffsetPosition> journalPositions, List<OffsetPosition> abbrevJournalPositions, List<OffsetPosition> conferencePositions, List<OffsetPosition> publisherPositions) { try { String line; StringTokenizer st = new StringTokenizer(text, "\n"); int totalLine = st.countTokens(); int posit = 0; int currentJournalPositions = 0; int currentAbbrevJournalPositions = 0; int currentConferencePositions = 0; int currentPublisherPositions = 0; boolean isJournalToken; boolean isAbbrevJournalToken; boolean isConferenceToken; boolean isPublisherToken; boolean skipTest; while (st.hasMoreTokens()) { isJournalToken = false; isAbbrevJournalToken = false; isConferenceToken = false; isPublisherToken = false; skipTest = false; line = st.nextToken(); if (line.trim().length() == 0) { writer.write("\n"); posit = 0; continue; } else if (line.endsWith("\t<ignore>")) { posit++; continue; } // check the position of matches for journals if (journalPositions != null) { if (currentJournalPositions == journalPositions.size() - 1) { if (journalPositions.get(currentJournalPositions).end < posit) { skipTest = true; } } if 
(!skipTest) { for (int i = currentJournalPositions; i < journalPositions.size(); i++) { if ((journalPositions.get(i).start <= posit) && (journalPositions.get(i).end >= posit)) { isJournalToken = true; currentJournalPositions = i; break; } else if (journalPositions.get(i).start > posit) { isJournalToken = false; currentJournalPositions = i; break; } } } } // check the position of matches for abbreviated journals skipTest = false; if (abbrevJournalPositions != null) { if (currentAbbrevJournalPositions == abbrevJournalPositions.size() - 1) { if (abbrevJournalPositions.get(currentAbbrevJournalPositions).end < posit) { skipTest = true; } } if (!skipTest) { for (int i = currentAbbrevJournalPositions; i < abbrevJournalPositions.size(); i++) { if ((abbrevJournalPositions.get(i).start <= posit) && (abbrevJournalPositions.get(i).end >= posit)) { isAbbrevJournalToken = true; currentAbbrevJournalPositions = i; break; } else if (abbrevJournalPositions.get(i).start > posit) { isAbbrevJournalToken = false; currentAbbrevJournalPositions = i; break; } } } } // check the position of matches for conferences skipTest = false; if (conferencePositions != null) { if (currentConferencePositions == conferencePositions.size() - 1) { if (conferencePositions.get(currentConferencePositions).end < posit) { skipTest = true; } } if (!skipTest) { for (int i = currentConferencePositions; i < conferencePositions.size(); i++) { if ((conferencePositions.get(i).start <= posit) && (conferencePositions.get(i).end >= posit)) { isConferenceToken = true; currentConferencePositions = i; break; } else if (conferencePositions.get(i).start > posit) { isConferenceToken = false; currentConferencePositions = i; break; } } } } // check the position of matches for publishers skipTest = false; if (publisherPositions != null) { if (currentPublisherPositions == publisherPositions.size() - 1) { if (publisherPositions.get(currentPublisherPositions).end < posit) { skipTest = true; } } if (!skipTest) { for (int i = 
currentPublisherPositions; i < publisherPositions.size(); i++) { if ((publisherPositions.get(i).start <= posit) && (publisherPositions.get(i).end >= posit)) { isPublisherToken = true; currentPublisherPositions = i; break; } else if (publisherPositions.get(i).start > posit) { isPublisherToken = false; currentPublisherPositions = i; break; } } } } FeaturesVectorReference featuresVector = FeaturesVectorReference.addFeaturesPatentReferences(line, totalLine, posit, isJournalToken, isAbbrevJournalToken, isConferenceToken, isPublisherToken); if (featuresVector.label == null) continue; writer.write(featuresVector.printVector()); writer.flush(); posit++; } } catch (Exception e) { throw new GrobidException("An exception occurred while running Grobid.", e); } } @Override public String evaluate() { //parameter 2 was in the former main() method of ParentEvaluation return new PatentEvaluation().evaluate(); } /** * Command line execution. * * @param args Command line arguments. * @throws Exception */ public static void main(String[] args) throws Exception { GrobidProperties.getInstance(); AbstractTrainer.runTraining(new PatentParserTrainer()); System.exit(0); } }
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.netty4.http.springboot; import io.netty.util.concurrent.EventExecutorGroup; import org.apache.camel.LoggingLevel; import org.apache.camel.component.netty4.http.NettyHttpBinding; import org.apache.camel.component.netty4.http.SecurityAuthenticator; import org.apache.camel.component.netty4.http.SecurityConstraint; import org.apache.camel.spi.HeaderFilterStrategy; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.boot.context.properties.NestedConfigurationProperty; /** * Netty HTTP server and client using the Netty 4.x library. * * Generated by camel-package-maven-plugin - do not edit this file! */ @ConfigurationProperties(prefix = "camel.component.netty4-http") public class NettyHttpComponentConfiguration { /** * To use a custom org.apache.camel.component.netty4.http.NettyHttpBinding * for binding to/from Netty and Camel Message API. */ @NestedConfigurationProperty private NettyHttpBinding nettyHttpBinding; /** * To use the NettyConfiguration as configuration when creating endpoints. 
*/ private NettyHttpConfigurationNestedConfiguration configuration; /** * To use a custom org.apache.camel.spi.HeaderFilterStrategy to filter * headers. */ @NestedConfigurationProperty private HeaderFilterStrategy headerFilterStrategy; /** * Refers to a * org.apache.camel.component.netty4.http.NettyHttpSecurityConfiguration for * configuring secure web resources. */ private NettyHttpSecurityConfigurationNestedConfiguration securityConfiguration; /** * The thread pool size for the EventExecutorGroup if its in use. The * default value is 16. */ private Integer maximumPoolSize = 16; /** * To use the given EventExecutorGroup */ @NestedConfigurationProperty private EventExecutorGroup executorService; /** * Whether the component should resolve property placeholders on itself when * starting. Only properties which are of String type can use property * placeholders. */ private Boolean resolvePropertyPlaceholders = true; public NettyHttpBinding getNettyHttpBinding() { return nettyHttpBinding; } public void setNettyHttpBinding(NettyHttpBinding nettyHttpBinding) { this.nettyHttpBinding = nettyHttpBinding; } public NettyHttpConfigurationNestedConfiguration getConfiguration() { return configuration; } public void setConfiguration( NettyHttpConfigurationNestedConfiguration configuration) { this.configuration = configuration; } public HeaderFilterStrategy getHeaderFilterStrategy() { return headerFilterStrategy; } public void setHeaderFilterStrategy( HeaderFilterStrategy headerFilterStrategy) { this.headerFilterStrategy = headerFilterStrategy; } public NettyHttpSecurityConfigurationNestedConfiguration getSecurityConfiguration() { return securityConfiguration; } public void setSecurityConfiguration( NettyHttpSecurityConfigurationNestedConfiguration securityConfiguration) { this.securityConfiguration = securityConfiguration; } public Integer getMaximumPoolSize() { return maximumPoolSize; } public void setMaximumPoolSize(Integer maximumPoolSize) { this.maximumPoolSize = 
maximumPoolSize; } public EventExecutorGroup getExecutorService() { return executorService; } public void setExecutorService(EventExecutorGroup executorService) { this.executorService = executorService; } public Boolean getResolvePropertyPlaceholders() { return resolvePropertyPlaceholders; } public void setResolvePropertyPlaceholders( Boolean resolvePropertyPlaceholders) { this.resolvePropertyPlaceholders = resolvePropertyPlaceholders; } public static class NettyHttpConfigurationNestedConfiguration { public static final Class CAMEL_NESTED_CLASS = org.apache.camel.component.netty4.http.NettyHttpConfiguration.class; /** * The protocol to use which is either http or https */ private String protocol; /** * The local hostname such as localhost, or 0.0.0.0 when being a * consumer. The remote HTTP server hostname when using producer. */ private String host; /** * The port number. Is default 80 for http and 443 for https. */ private Integer port; /** * Allow using gzip/deflate for compression on the Netty HTTP server if * the client supports it from the HTTP headers. */ private Boolean compression; /** * Option to disable throwing the HttpOperationFailedException in case * of failed responses from the remote server. This allows you to get * all responses regardless of the HTTP status code. */ private Boolean throwExceptionOnFailure; /** * If enabled and an Exchange failed processing on the consumer side, * and if the caused Exception was send back serialized in the response * as a application/x-java-serialized-object content type. On the * producer side the exception will be deserialized and thrown as is, * instead of the HttpOperationFailedException. The caused exception is * required to be serialized. * <p/> * This is by default turned off. If you enable this then be aware that * Java will deserialize the incoming data from the request to Java and * that can be a potential security risk. 
*/ private Boolean transferException; /** * If this option is enabled, then during binding from Netty to Camel * Message then the header values will be URL decoded (eg %20 will be a * space character. Notice this option is used by the default * org.apache.camel.component.netty.http.NettyHttpBinding and therefore * if you implement a custom * org.apache.camel.component.netty4.http.NettyHttpBinding then you * would need to decode the headers accordingly to this option. */ private Boolean urlDecodeHeaders; /** * If this option is enabled, then during binding from Netty to Camel * Message then the headers will be mapped as well (eg added as header * to the Camel Message as well). You can turn off this option to * disable this. The headers can still be accessed from the * org.apache.camel.component.netty.http.NettyHttpMessage message with * the method getHttpRequest() that returns the Netty HTTP request * io.netty.handler.codec.http.HttpRequest instance. */ private Boolean mapHeaders; /** * Whether or not Camel should try to find a target consumer by matching * the URI prefix if no exact match is found. */ private Boolean matchOnUriPrefix; /** * If the option is true, the producer will ignore the Exchange.HTTP_URI * header, and use the endpoint's URI for request. You may also set the * throwExceptionOnFailure to be false to let the producer send all the * fault response back. The consumer working in the bridge mode will * skip the gzip compression and WWW URL form encoding (by adding the * Exchange.SKIP_GZIP_ENCODING and Exchange.SKIP_WWW_FORM_URLENCODED * headers to the consumed exchange). */ private Boolean bridgeEndpoint; /** * Resource path */ private String path; /** * Determines whether or not the raw input stream from Netty * HttpRequest#getContent() or HttpResponset#getContent() is cached or * not (Camel will read the stream into a in light-weight memory based * Stream caching) cache. 
By default Camel will cache the Netty input * stream to support reading it multiple times to ensure it Camel can * retrieve all data from the stream. However you can set this option to * true when you for example need to access the raw stream, such as * streaming it directly to a file or other persistent store. Mind that * if you enable this option, then you cannot read the Netty stream * multiple times out of the box, and you would need manually to reset * the reader index on the Netty raw stream. Also Netty will auto-close * the Netty stream when the Netty HTTP server/HTTP client is done * processing, which means that if the asynchronous routing engine is in * use then any asynchronous thread that may continue routing the * {@link org.apache.camel.Exchange} may not be able to read the Netty * stream, because Netty has closed it. */ private Boolean disableStreamCache; /** * Whether to send back HTTP status code 503 when the consumer has been * suspended. If the option is false then the Netty Acceptor is unbound * when the consumer is suspended, so clients cannot connect anymore. */ private Boolean send503whenSuspended; /** * Value in bytes the max content length per chunked frame received on * the Netty HTTP server. */ private Integer chunkedMaxContentLength; /** * The maximum length of all headers. If the sum of the length of each * header exceeds this value, a * {@link io.netty.handler.codec.TooLongFrameException} will be raised. */ private Integer maxHeaderSize; private Boolean allowDefaultCodec; /** * The status codes which is considered a success response. The values * are inclusive. The range must be defined as from-to with the dash * included. * <p/> * The default range is <tt>200-299</tt> */ private String okStatusCodeRange = "200-299"; /** * Sets whether to use a relative path in HTTP requests. 
*/ private Boolean useRelativePath; public String getProtocol() { return protocol; } public void setProtocol(String protocol) { this.protocol = protocol; } public String getHost() { return host; } public void setHost(String host) { this.host = host; } public Integer getPort() { return port; } public void setPort(Integer port) { this.port = port; } public Boolean getCompression() { return compression; } public void setCompression(Boolean compression) { this.compression = compression; } public Boolean getThrowExceptionOnFailure() { return throwExceptionOnFailure; } public void setThrowExceptionOnFailure(Boolean throwExceptionOnFailure) { this.throwExceptionOnFailure = throwExceptionOnFailure; } public Boolean getTransferException() { return transferException; } public void setTransferException(Boolean transferException) { this.transferException = transferException; } public Boolean getUrlDecodeHeaders() { return urlDecodeHeaders; } public void setUrlDecodeHeaders(Boolean urlDecodeHeaders) { this.urlDecodeHeaders = urlDecodeHeaders; } public Boolean getMapHeaders() { return mapHeaders; } public void setMapHeaders(Boolean mapHeaders) { this.mapHeaders = mapHeaders; } public Boolean getMatchOnUriPrefix() { return matchOnUriPrefix; } public void setMatchOnUriPrefix(Boolean matchOnUriPrefix) { this.matchOnUriPrefix = matchOnUriPrefix; } public Boolean getBridgeEndpoint() { return bridgeEndpoint; } public void setBridgeEndpoint(Boolean bridgeEndpoint) { this.bridgeEndpoint = bridgeEndpoint; } public String getPath() { return path; } public void setPath(String path) { this.path = path; } public Boolean getDisableStreamCache() { return disableStreamCache; } public void setDisableStreamCache(Boolean disableStreamCache) { this.disableStreamCache = disableStreamCache; } public Boolean getSend503whenSuspended() { return send503whenSuspended; } public void setSend503whenSuspended(Boolean send503whenSuspended) { this.send503whenSuspended = send503whenSuspended; } public Integer 
getChunkedMaxContentLength() { return chunkedMaxContentLength; } public void setChunkedMaxContentLength(Integer chunkedMaxContentLength) { this.chunkedMaxContentLength = chunkedMaxContentLength; } public Integer getMaxHeaderSize() { return maxHeaderSize; } public void setMaxHeaderSize(Integer maxHeaderSize) { this.maxHeaderSize = maxHeaderSize; } public Boolean getAllowDefaultCodec() { return allowDefaultCodec; } public void setAllowDefaultCodec(Boolean allowDefaultCodec) { this.allowDefaultCodec = allowDefaultCodec; } public String getOkStatusCodeRange() { return okStatusCodeRange; } public void setOkStatusCodeRange(String okStatusCodeRange) { this.okStatusCodeRange = okStatusCodeRange; } public Boolean getUseRelativePath() { return useRelativePath; } public void setUseRelativePath(Boolean useRelativePath) { this.useRelativePath = useRelativePath; } } public static class NettyHttpSecurityConfigurationNestedConfiguration { public static final Class CAMEL_NESTED_CLASS = org.apache.camel.component.netty4.http.NettyHttpSecurityConfiguration.class; /** * Whether to enable authentication * <p/> * This is by default enabled. */ private Boolean authenticate; /** * The supported restricted. * <p/> * Currently only Basic is supported. */ private String constraint; /** * Sets the name of the realm to use. */ private String realm; /** * Sets a {@link SecurityConstraint} to use for checking if a web * resource is restricted or not * <p/> * By default this is <tt>null</tt>, which means all resources is * restricted. */ private SecurityConstraint securityConstraint; /** * Sets the {@link SecurityAuthenticator} to use for authenticating the * {@link HttpPrincipal} . */ private SecurityAuthenticator securityAuthenticator; /** * Sets a logging level to use for logging denied login attempts (incl * stacktraces) * <p/> * This level is by default DEBUG. 
*/ private LoggingLevel loginDeniedLoggingLevel; private String roleClassName; public Boolean getAuthenticate() { return authenticate; } public void setAuthenticate(Boolean authenticate) { this.authenticate = authenticate; } public String getConstraint() { return constraint; } public void setConstraint(String constraint) { this.constraint = constraint; } public String getRealm() { return realm; } public void setRealm(String realm) { this.realm = realm; } public SecurityConstraint getSecurityConstraint() { return securityConstraint; } public void setSecurityConstraint(SecurityConstraint securityConstraint) { this.securityConstraint = securityConstraint; } public SecurityAuthenticator getSecurityAuthenticator() { return securityAuthenticator; } public void setSecurityAuthenticator( SecurityAuthenticator securityAuthenticator) { this.securityAuthenticator = securityAuthenticator; } public LoggingLevel getLoginDeniedLoggingLevel() { return loginDeniedLoggingLevel; } public void setLoginDeniedLoggingLevel( LoggingLevel loginDeniedLoggingLevel) { this.loginDeniedLoggingLevel = loginDeniedLoggingLevel; } public String getRoleClassName() { return roleClassName; } public void setRoleClassName(String roleClassName) { this.roleClassName = roleClassName; } } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.pipeline;

import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.script.groovy.GroovyScriptEngineService;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram;
import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram.Bucket;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.having;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.notNullValue;

/**
 * Integration tests for the {@code having} (bucket selector) pipeline aggregation:
 * a Groovy script evaluated per histogram bucket decides whether the bucket is
 * kept ({@code true}) or pruned ({@code false}) from the response.
 *
 * Each test builds a histogram over {@code field1} with {@code sum} sub-aggregations
 * and asserts that every surviving bucket satisfies the script's predicate.
 * The {@code Double.isNaN(_value0)} guard in most scripts handles empty buckets,
 * whose sum paths resolve to NaN under the default gap policy.
 */
@ElasticsearchIntegrationTest.SuiteScopeTest
public class BucketSelectorTests extends ElasticsearchIntegrationTest {

    private static final String FIELD_1_NAME = "field1";
    private static final String FIELD_2_NAME = "field2";
    private static final String FIELD_3_NAME = "field3";
    private static final String FIELD_4_NAME = "field4";

    // Randomized per suite run (suite-scoped cluster): histogram interval and doc count.
    private static int interval;
    private static int numDocs;
    private static int minNumber;
    private static int maxNumber;

    @Override
    public void setupSuiteScopeCluster() throws Exception {
        createIndex("idx");
        createIndex("idx_unmapped");

        interval = randomIntBetween(1, 50);
        numDocs = randomIntBetween(10, 500);
        minNumber = -200;
        maxNumber = 200;

        List<IndexRequestBuilder> builders = new ArrayList<>();
        for (int docs = 0; docs < numDocs; docs++) {
            builders.add(client().prepareIndex("idx", "type").setSource(newDocBuilder()));
        }

        // Store a script under id "my_script" for the indexedScript() test; it is the
        // same predicate used inline elsewhere: keep buckets where sum2 + sum3 > 100.
        client().preparePutIndexedScript().setId("my_script").setScriptLang(GroovyScriptEngineService.NAME)
                .setSource("{ \"script\": \"Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)\" }").get();

        indexRandom(true, builders);
        ensureSearchable();
    }

    /** Builds one document with four independent random integer fields in [minNumber, maxNumber]. */
    private XContentBuilder newDocBuilder() throws IOException {
        XContentBuilder jsonBuilder = jsonBuilder();
        jsonBuilder.startObject();
        jsonBuilder.field(FIELD_1_NAME, randomIntBetween(minNumber, maxNumber));
        jsonBuilder.field(FIELD_2_NAME, randomIntBetween(minNumber, maxNumber));
        jsonBuilder.field(FIELD_3_NAME, randomIntBetween(minNumber, maxNumber));
        jsonBuilder.field(FIELD_4_NAME, randomIntBetween(minNumber, maxNumber));
        jsonBuilder.endObject();
        return jsonBuilder;
    }

    /** Inline script keeps only buckets where field2Sum + field3Sum > 100; verifies each survivor. */
    @Test
    public void inlineScript() {

        SearchResponse response = client()
                .prepareSearch("idx")
                .addAggregation(
                        histogram("histo")
                                .field(FIELD_1_NAME)
                                .interval(interval)
                                .subAggregation(sum("field2Sum").field(FIELD_2_NAME))
                                .subAggregation(sum("field3Sum").field(FIELD_3_NAME))
                                .subAggregation(
                                        having("having").setBucketsPaths("field2Sum", "field3Sum").script(
                                                new Script("Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)",
                                                        ScriptType.INLINE, null, null)))).execute()
                .actionGet();

        assertSearchResponse(response);

        InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        List<? extends Bucket> buckets = histo.getBuckets();

        for (int i = 0; i < buckets.size(); ++i) {
            Histogram.Bucket bucket = buckets.get(i);
            Sum field2Sum = bucket.getAggregations().get("field2Sum");
            assertThat(field2Sum, notNullValue());
            double field2SumValue = field2Sum.getValue();
            Sum field3Sum = bucket.getAggregations().get("field3Sum");
            assertThat(field3Sum, notNullValue());
            double field3SumValue = field3Sum.getValue();
            assertThat(field2SumValue + field3SumValue, greaterThan(100.0));
        }
    }

    /**
     * Predicate is effectively always true for this data set (sums are bounded well
     * below 10000), so no bucket should be pruned; empty buckets are kept too
     * ({@code true} on NaN).
     */
    @Test
    public void inlineScriptNoBucketsPruned() {

        SearchResponse response = client()
                .prepareSearch("idx")
                .addAggregation(
                        histogram("histo")
                                .field(FIELD_1_NAME)
                                .interval(interval)
                                .subAggregation(sum("field2Sum").field(FIELD_2_NAME))
                                .subAggregation(sum("field3Sum").field(FIELD_3_NAME))
                                .subAggregation(
                                        having("having").setBucketsPaths("field2Sum", "field3Sum").script(
                                                new Script("Double.isNaN(_value0) ? true : (_value0 < 10000)",
                                                        ScriptType.INLINE, null, null)))).execute()
                .actionGet();

        assertSearchResponse(response);

        InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        List<? extends Bucket> buckets = histo.getBuckets();

        for (int i = 0; i < buckets.size(); ++i) {
            Histogram.Bucket bucket = buckets.get(i);
            Sum field2Sum = bucket.getAggregations().get("field2Sum");
            assertThat(field2Sum, notNullValue());
            double field2SumValue = field2Sum.getValue();
            Sum field3Sum = bucket.getAggregations().get("field3Sum");
            assertThat(field3Sum, notNullValue());
            double field3SumValue = field3Sum.getValue();
            assertThat(field2SumValue + field3SumValue, lessThan(10000.0));
        }
    }

    /** Predicate is unsatisfiable for this data set, so every bucket must be pruned. */
    @Test
    public void inlineScriptNoBucketsLeft() {

        SearchResponse response = client()
                .prepareSearch("idx")
                .addAggregation(
                        histogram("histo")
                                .field(FIELD_1_NAME)
                                .interval(interval)
                                .subAggregation(sum("field2Sum").field(FIELD_2_NAME))
                                .subAggregation(sum("field3Sum").field(FIELD_3_NAME))
                                .subAggregation(
                                        having("having").setBucketsPaths("field2Sum", "field3Sum").script(
                                                new Script("Double.isNaN(_value0) ? false : (_value0 > 10000)",
                                                        ScriptType.INLINE, null, null)))).execute().actionGet();

        assertSearchResponse(response);

        InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        List<? extends Bucket> buckets = histo.getBuckets();
        assertThat(buckets.size(), equalTo(0));
    }

    /** Script comparing the two buckets-path values to each other (field2Sum < field3Sum). */
    @Test
    public void inlineScript2() {

        SearchResponse response = client()
                .prepareSearch("idx")
                .addAggregation(
                        histogram("histo")
                                .field(FIELD_1_NAME)
                                .interval(interval)
                                .subAggregation(sum("field2Sum").field(FIELD_2_NAME))
                                .subAggregation(sum("field3Sum").field(FIELD_3_NAME))
                                .subAggregation(
                                        having("having").setBucketsPaths("field2Sum", "field3Sum").script(
                                                new Script("Double.isNaN(_value0) ? false : (_value0 < _value1)",
                                                        ScriptType.INLINE, null, null)))).execute().actionGet();

        assertSearchResponse(response);

        InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        List<? extends Bucket> buckets = histo.getBuckets();

        for (int i = 0; i < buckets.size(); ++i) {
            Histogram.Bucket bucket = buckets.get(i);
            Sum field2Sum = bucket.getAggregations().get("field2Sum");
            assertThat(field2Sum, notNullValue());
            double field2SumValue = field2Sum.getValue();
            Sum field3Sum = bucket.getAggregations().get("field3Sum");
            assertThat(field3Sum, notNullValue());
            double field3SumValue = field3Sum.getValue();
            assertThat(field3SumValue - field2SumValue, greaterThan(0.0));
        }
    }

    /** Selector driven by a single buckets path; only field2Sum is referenced. */
    @Test
    public void inlineScriptSingleVariable() {

        SearchResponse response = client()
                .prepareSearch("idx")
                .addAggregation(
                        histogram("histo")
                                .field(FIELD_1_NAME)
                                .interval(interval)
                                .subAggregation(sum("field2Sum").field(FIELD_2_NAME))
                                .subAggregation(
                                        having("having").setBucketsPaths("field2Sum")
                                                .script(new Script("Double.isNaN(_value0) ? false : (_value0 > 100)",
                                                        ScriptType.INLINE, null, null)))).execute().actionGet();

        assertSearchResponse(response);

        InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        List<? extends Bucket> buckets = histo.getBuckets();

        for (int i = 0; i < buckets.size(); ++i) {
            Histogram.Bucket bucket = buckets.get(i);
            Sum field2Sum = bucket.getAggregations().get("field2Sum");
            assertThat(field2Sum, notNullValue());
            double field2SumValue = field2Sum.getValue();
            assertThat(field2SumValue, greaterThan(100.0));
        }
    }

    /** Buckets paths supplied as a name→path map; script references the custom variable names. */
    @Test
    public void inlineScriptNamedVars() {

        Map<String, String> bucketPathsMap = new HashMap<>();
        bucketPathsMap.put("my_value1", "field2Sum");
        bucketPathsMap.put("my_value2", "field3Sum");

        SearchResponse response = client()
                .prepareSearch("idx")
                .addAggregation(
                        histogram("histo")
                                .field(FIELD_1_NAME)
                                .interval(interval)
                                .subAggregation(sum("field2Sum").field(FIELD_2_NAME))
                                .subAggregation(sum("field3Sum").field(FIELD_3_NAME))
                                .subAggregation(
                                        having("having").setBucketsPathsMap(bucketPathsMap).script(
                                                new Script("Double.isNaN(my_value1) ? false : (my_value1 + my_value2 > 100)",
                                                        ScriptType.INLINE, null, null)))).execute()
                .actionGet();

        assertSearchResponse(response);

        InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        List<? extends Bucket> buckets = histo.getBuckets();

        for (int i = 0; i < buckets.size(); ++i) {
            Histogram.Bucket bucket = buckets.get(i);
            Sum field2Sum = bucket.getAggregations().get("field2Sum");
            assertThat(field2Sum, notNullValue());
            double field2SumValue = field2Sum.getValue();
            Sum field3Sum = bucket.getAggregations().get("field3Sum");
            assertThat(field3Sum, notNullValue());
            double field3SumValue = field3Sum.getValue();
            assertThat(field2SumValue + field3SumValue, greaterThan(100.0));
        }
    }

    /** Threshold passed as a script parameter instead of being hard-coded in the source. */
    @Test
    public void inlineScriptWithParams() {

        Map<String, Object> params = new HashMap<>();
        params.put("threshold", 100);

        SearchResponse response = client()
                .prepareSearch("idx")
                .addAggregation(
                        histogram("histo")
                                .field(FIELD_1_NAME)
                                .interval(interval)
                                .subAggregation(sum("field2Sum").field(FIELD_2_NAME))
                                .subAggregation(sum("field3Sum").field(FIELD_3_NAME))
                                .subAggregation(
                                        having("having").setBucketsPaths("field2Sum", "field3Sum").script(
                                                new Script("Double.isNaN(_value0) ? false : (_value0 + _value1 > threshold)",
                                                        ScriptType.INLINE, null, params)))).execute()
                .actionGet();

        assertSearchResponse(response);

        InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        List<? extends Bucket> buckets = histo.getBuckets();

        for (int i = 0; i < buckets.size(); ++i) {
            Histogram.Bucket bucket = buckets.get(i);
            Sum field2Sum = bucket.getAggregations().get("field2Sum");
            assertThat(field2Sum, notNullValue());
            double field2SumValue = field2Sum.getValue();
            Sum field3Sum = bucket.getAggregations().get("field3Sum");
            assertThat(field3Sum, notNullValue());
            double field3SumValue = field3Sum.getValue();
            assertThat(field2SumValue + field3SumValue, greaterThan(100.0));
        }
    }

    /**
     * Uses {@link GapPolicy#INSERT_ZEROS}: empty buckets resolve to 0 rather than NaN,
     * so the script needs no NaN guard.
     */
    @Test
    public void inlineScriptInsertZeros() {

        SearchResponse response = client()
                .prepareSearch("idx")
                .addAggregation(
                        histogram("histo")
                                .field(FIELD_1_NAME)
                                .interval(interval)
                                .subAggregation(sum("field2Sum").field(FIELD_2_NAME))
                                .subAggregation(sum("field3Sum").field(FIELD_3_NAME))
                                .subAggregation(
                                        having("having").setBucketsPaths("field2Sum", "field3Sum").gapPolicy(GapPolicy.INSERT_ZEROS)
                                                .script(new Script("_value0 + _value1 > 100", ScriptType.INLINE, null, null))))
                .execute().actionGet();

        assertSearchResponse(response);

        InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        List<? extends Bucket> buckets = histo.getBuckets();

        for (int i = 0; i < buckets.size(); ++i) {
            Histogram.Bucket bucket = buckets.get(i);
            Sum field2Sum = bucket.getAggregations().get("field2Sum");
            assertThat(field2Sum, notNullValue());
            double field2SumValue = field2Sum.getValue();
            Sum field3Sum = bucket.getAggregations().get("field3Sum");
            assertThat(field3Sum, notNullValue());
            double field3SumValue = field3Sum.getValue();
            assertThat(field2SumValue + field3SumValue, greaterThan(100.0));
        }
    }

    /** Same predicate as inlineScript(), but referenced via the indexed script id "my_script". */
    @Test
    public void indexedScript() {

        SearchResponse response = client()
                .prepareSearch("idx")
                .addAggregation(
                        histogram("histo")
                                .field(FIELD_1_NAME)
                                .interval(interval)
                                .subAggregation(sum("field2Sum").field(FIELD_2_NAME))
                                .subAggregation(sum("field3Sum").field(FIELD_3_NAME))
                                .subAggregation(
                                        having("having").setBucketsPaths("field2Sum", "field3Sum").script(
                                                new Script("my_script", ScriptType.INDEXED, null, null)))).execute().actionGet();

        assertSearchResponse(response);

        InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        List<? extends Bucket> buckets = histo.getBuckets();

        for (int i = 0; i < buckets.size(); ++i) {
            Histogram.Bucket bucket = buckets.get(i);
            Sum field2Sum = bucket.getAggregations().get("field2Sum");
            assertThat(field2Sum, notNullValue());
            double field2SumValue = field2Sum.getValue();
            Sum field3Sum = bucket.getAggregations().get("field3Sum");
            assertThat(field3Sum, notNullValue());
            double field3SumValue = field3Sum.getValue();
            assertThat(field2SumValue + field3SumValue, greaterThan(100.0));
        }
    }

    /** Against an index with no mapped data the histogram must come back with zero buckets. */
    @Test
    public void unmapped() throws Exception {

        SearchResponse response = client()
                .prepareSearch("idx_unmapped")
                .addAggregation(
                        histogram("histo")
                                .field(FIELD_1_NAME)
                                .interval(interval)
                                .subAggregation(sum("field2Sum").field(FIELD_2_NAME))
                                .subAggregation(sum("field3Sum").field(FIELD_3_NAME))
                                .subAggregation(
                                        having("having").setBucketsPaths("field2Sum", "field3Sum").script(
                                                new Script("Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)",
                                                        ScriptType.INLINE, null, null)))).execute()
                .actionGet();

        assertSearchResponse(response);

        InternalHistogram<Bucket> deriv = response.getAggregations().get("histo");
        assertThat(deriv, notNullValue());
        assertThat(deriv.getName(), equalTo("histo"));
        assertThat(deriv.getBuckets().size(), equalTo(0));
    }

    /** Mapped + unmapped indices together must behave like the mapped index alone. */
    @Test
    public void partiallyUnmapped() throws Exception {

        SearchResponse response = client()
                .prepareSearch("idx", "idx_unmapped")
                .addAggregation(
                        histogram("histo")
                                .field(FIELD_1_NAME)
                                .interval(interval)
                                .subAggregation(sum("field2Sum").field(FIELD_2_NAME))
                                .subAggregation(sum("field3Sum").field(FIELD_3_NAME))
                                .subAggregation(
                                        having("having").setBucketsPaths("field2Sum", "field3Sum").script(
                                                new Script("Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)",
                                                        ScriptType.INLINE, null, null)))).execute()
                .actionGet();

        assertSearchResponse(response);

        InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        List<? extends Bucket> buckets = histo.getBuckets();

        for (int i = 0; i < buckets.size(); ++i) {
            Histogram.Bucket bucket = buckets.get(i);
            Sum field2Sum = bucket.getAggregations().get("field2Sum");
            assertThat(field2Sum, notNullValue());
            double field2SumValue = field2Sum.getValue();
            Sum field3Sum = bucket.getAggregations().get("field3Sum");
            assertThat(field3Sum, notNullValue());
            double field3SumValue = field3Sum.getValue();
            assertThat(field2SumValue + field3SumValue, greaterThan(100.0));
        }
    }
}
package com.netflix.discovery.shared;

import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import java.io.FileInputStream;
import java.io.IOException;
import java.security.KeyStore;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import com.netflix.discovery.converters.wrappers.CodecWrappers;
import com.netflix.discovery.provider.DiscoveryJerseyProvider;
import com.netflix.servo.monitor.BasicCounter;
import com.netflix.servo.monitor.BasicTimer;
import com.netflix.servo.monitor.Counter;
import com.netflix.servo.monitor.MonitorConfig;
import com.netflix.servo.monitor.Monitors;
import com.netflix.servo.monitor.Stopwatch;
import com.sun.jersey.api.client.config.ClientConfig;
import com.sun.jersey.client.apache4.ApacheHttpClient4;
import com.sun.jersey.client.apache4.config.ApacheHttpClient4Config;
import com.sun.jersey.client.apache4.config.DefaultApacheHttpClient4Config;
import org.apache.http.client.params.ClientPNames;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.conn.scheme.SchemeRegistry;
import org.apache.http.conn.ssl.SSLSocketFactory;
import org.apache.http.params.CoreProtocolPNames;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.params.HttpParams;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static com.netflix.discovery.util.DiscoveryBuildInfo.buildVersion;

/**
 * Wrapper around a Jersey/Apache HTTP client used for Eureka server communication.
 * Construction configures connect/read timeouts and starts a single-threaded
 * scheduled task that periodically closes idle pooled connections.
 * Instances are built via {@link EurekaJerseyClientBuilder}, which also selects
 * the connection manager (plain, system SSL, or custom trust-store SSL) and
 * optional proxy configuration.
 *
 * @author Tomasz Bak
 */
public class EurekaJerseyClient {

    private static final Logger s_logger = LoggerFactory.getLogger(EurekaJerseyClient.class);

    // How often the idle-connection cleaner runs (both initial delay and period).
    private static final int HTTP_CONNECTION_CLEANER_INTERVAL_MS = 30 * 1000;

    private static final String PROTOCOL = "https";
    private static final String PROTOCOL_SCHEME = "SSL";
    private static final int HTTPS_PORT = 443;
    private static final String KEYSTORE_TYPE = "JKS";

    private final ApacheHttpClient4 apacheHttpClient;

    ClientConfig jerseyClientConfig;

    // Daemon thread so the cleaner never prevents JVM shutdown.
    private final ScheduledExecutorService eurekaConnCleaner =
            Executors.newSingleThreadScheduledExecutor(new ThreadFactory() {
                private final AtomicInteger threadNumber = new AtomicInteger(1);

                @Override
                public Thread newThread(Runnable r) {
                    Thread thread = new Thread(r, "Eureka-JerseyClient-Conn-Cleaner" + threadNumber.incrementAndGet());
                    thread.setDaemon(true);
                    return thread;
                }
            });

    /**
     * Creates the underlying Apache/Jersey client and schedules idle-connection cleanup.
     *
     * @param connectionTimeout     TCP connect timeout in milliseconds
     * @param readTimeout           socket read (SO) timeout in milliseconds
     * @param connectionIdleTimeout idle time in SECONDS after which pooled connections are closed
     * @param clientConfig          Jersey client config; the DiscoveryJerseyProvider class is added to it
     * @throws RuntimeException wrapping any failure during client creation
     */
    public EurekaJerseyClient(int connectionTimeout, int readTimeout, final int connectionIdleTimeout,
                              ClientConfig clientConfig) {
        try {
            jerseyClientConfig = clientConfig;
            jerseyClientConfig.getClasses().add(DiscoveryJerseyProvider.class);
            apacheHttpClient = ApacheHttpClient4.create(jerseyClientConfig);

            HttpParams params = apacheHttpClient.getClientHandler().getHttpClient().getParams();
            HttpConnectionParams.setConnectionTimeout(params, connectionTimeout);
            HttpConnectionParams.setSoTimeout(params, readTimeout);

            eurekaConnCleaner.scheduleWithFixedDelay(
                    new ConnectionCleanerTask(connectionIdleTimeout), HTTP_CONNECTION_CLEANER_INTERVAL_MS,
                    HTTP_CONNECTION_CLEANER_INTERVAL_MS,
                    TimeUnit.MILLISECONDS);
        } catch (Throwable e) {
            throw new RuntimeException("Cannot create Jersey client", e);
        }
    }

    /** @return the underlying Jersey client backed by Apache HttpClient 4 */
    public ApacheHttpClient4 getClient() {
        return apacheHttpClient;
    }

    /**
     * Clean up resources: stops the connection-cleaner executor and destroys
     * the Jersey client (which releases its connection manager).
     */
    public void destroyResources() {
        if (eurekaConnCleaner != null) {
            eurekaConnCleaner.shutdown();
        }
        if (apacheHttpClient != null) {
            apacheHttpClient.destroy();
        }
    }

    /**
     * Fluent builder for {@link EurekaJerseyClient}. SSL mode is chosen by
     * precedence: system SSL, then custom trust store, then plain HTTP.
     */
    public static class EurekaJerseyClientBuilder {

        private boolean systemSSL;
        private String clientName;
        private int maxConnectionsPerHost;
        private int maxTotalConnections;
        private String trustStoreFileName;
        private String trustStorePassword;
        private String userAgent;
        private String proxyUserName;
        private String proxyPassword;
        private String proxyHost;
        private String proxyPort;
        private int connectionTimeout;
        private int readTimeout;
        private int connectionIdleTimeout;
        private String encoderName;
        private String decoderName;
        private String clientDataAccept;

        public EurekaJerseyClientBuilder withClientName(String clientName) {
            this.clientName = clientName;
            return this;
        }

        public EurekaJerseyClientBuilder withUserAgent(String userAgent) {
            this.userAgent = userAgent;
            return this;
        }

        public EurekaJerseyClientBuilder withConnectionTimeout(int connectionTimeout) {
            this.connectionTimeout = connectionTimeout;
            return this;
        }

        public EurekaJerseyClientBuilder withReadTimeout(int readTimeout) {
            this.readTimeout = readTimeout;
            return this;
        }

        public EurekaJerseyClientBuilder withConnectionIdleTimeout(int connectionIdleTimeout) {
            this.connectionIdleTimeout = connectionIdleTimeout;
            return this;
        }

        public EurekaJerseyClientBuilder withMaxConnectionsPerHost(int maxConnectionsPerHost) {
            this.maxConnectionsPerHost = maxConnectionsPerHost;
            return this;
        }

        public EurekaJerseyClientBuilder withMaxTotalConnections(int maxTotalConnections) {
            this.maxTotalConnections = maxTotalConnections;
            return this;
        }

        public EurekaJerseyClientBuilder withProxy(String proxyHost, String proxyPort, String user, String password) {
            this.proxyHost = proxyHost;
            this.proxyPort = proxyPort;
            this.proxyUserName = user;
            this.proxyPassword = password;
            return this;
        }

        /** Use the JVM/system default SSL socket factory instead of a custom trust store. */
        public EurekaJerseyClientBuilder withSystemSSLConfiguration() {
            this.systemSSL = true;
            return this;
        }

        /** Configure a custom JKS trust store for server certificate validation. */
        public EurekaJerseyClientBuilder withTrustStoreFile(String trustStoreFileName, String trustStorePassword) {
            this.trustStoreFileName = trustStoreFileName;
            this.trustStorePassword = trustStorePassword;
            return this;
        }

        public EurekaJerseyClientBuilder withEncoder(String encoderName) {
            this.encoderName = encoderName;
            return this;
        }

        public EurekaJerseyClientBuilder withDecoder(String decoderName, String clientDataAccept) {
            this.decoderName = decoderName;
            this.clientDataAccept = clientDataAccept;
            return this;
        }

        /**
         * Builds the client using a config assembled from the builder state.
         *
         * @throws RuntimeException wrapping any failure during construction
         */
        public EurekaJerseyClient build() {
            MyDefaultApacheHttpClient4Config config = new MyDefaultApacheHttpClient4Config();
            try {
                return new EurekaJerseyClient(connectionTimeout, readTimeout, connectionIdleTimeout, config);
            } catch (Throwable e) {
                throw new RuntimeException("Cannot create Jersey client ", e);
            }
        }

        /**
         * Jersey client config that wires in the connection manager (plain / system SSL /
         * custom SSL), optional proxy, codec provider, pool limits, User-Agent, and
         * disables redirect following.
         */
        class MyDefaultApacheHttpClient4Config extends DefaultApacheHttpClient4Config {
            MyDefaultApacheHttpClient4Config() {
                MonitoredConnectionManager cm;

                if (systemSSL) {
                    cm = createSystemSslCM();
                } else if (trustStoreFileName != null) {
                    cm = createCustomSslCM();
                } else {
                    cm = new MonitoredConnectionManager(clientName);
                }

                if (proxyHost != null) {
                    addProxyConfiguration(cm);
                }

                DiscoveryJerseyProvider discoveryJerseyProvider = new DiscoveryJerseyProvider(
                        CodecWrappers.getEncoder(encoderName),
                        CodecWrappers.resolveDecoder(decoderName, clientDataAccept)
                );
                getSingletons().add(discoveryJerseyProvider);

                // Common properties to all clients
                cm.setDefaultMaxPerRoute(maxConnectionsPerHost);
                cm.setMaxTotal(maxTotalConnections);
                getProperties().put(ApacheHttpClient4Config.PROPERTY_CONNECTION_MANAGER, cm);

                String fullUserAgentName = (userAgent == null ? clientName : userAgent) + "/v" + buildVersion();
                getProperties().put(CoreProtocolPNames.USER_AGENT, fullUserAgentName);

                // To pin a client to specific server in case redirect happens, we handle redirects directly
                // (see DiscoveryClient.makeRemoteCall methods).
                getProperties().put(PROPERTY_FOLLOW_REDIRECTS, Boolean.FALSE);
                getProperties().put(ClientPNames.HANDLE_REDIRECTS, Boolean.FALSE);
            }

            /** Registers proxy URI and credentials; dummy "guest" credentials work around an Apache client bug. */
            private void addProxyConfiguration(MonitoredConnectionManager cm) {
                if (proxyUserName != null && proxyPassword != null) {
                    getProperties().put(ApacheHttpClient4Config.PROPERTY_PROXY_USERNAME, proxyUserName);
                    getProperties().put(ApacheHttpClient4Config.PROPERTY_PROXY_PASSWORD, proxyPassword);
                } else {
                    // Due to bug in apache client, user name/password must always be set.
                    // Otherwise proxy configuration is ignored.
                    getProperties().put(ApacheHttpClient4Config.PROPERTY_PROXY_USERNAME, "guest");
                    getProperties().put(ApacheHttpClient4Config.PROPERTY_PROXY_PASSWORD, "guest");
                }
                getProperties().put(DefaultApacheHttpClient4Config.PROPERTY_PROXY_URI, "http://" + proxyHost + ":" + proxyPort);
            }

            /** Builds a connection manager using the JVM's system SSL socket factory on port 443. */
            private MonitoredConnectionManager createSystemSslCM() {
                MonitoredConnectionManager cm;
                SSLSocketFactory sslSocketFactory = SSLSocketFactory.getSystemSocketFactory();
                SchemeRegistry sslSchemeRegistry = new SchemeRegistry();
                sslSchemeRegistry.register(new Scheme(PROTOCOL, HTTPS_PORT, sslSocketFactory));
                cm = new MonitoredConnectionManager(clientName, sslSchemeRegistry);
                return cm;
            }

            /**
             * Builds a connection manager whose SSLContext trusts only certificates in the
             * configured JKS trust store.
             *
             * NOTE(review): ALLOW_ALL_HOSTNAME_VERIFIER disables hostname verification,
             * which weakens TLS (MITM risk) — presumably intentional for internal Eureka
             * endpoints, but worth confirming.
             *
             * @throws IllegalStateException wrapping any keystore/SSL setup failure
             */
            private MonitoredConnectionManager createCustomSslCM() {
                FileInputStream fin = null;
                try {
                    SSLContext sslContext = SSLContext.getInstance(PROTOCOL_SCHEME);
                    KeyStore sslKeyStore = KeyStore.getInstance(KEYSTORE_TYPE);

                    fin = new FileInputStream(trustStoreFileName);
                    sslKeyStore.load(fin, trustStorePassword.toCharArray());

                    TrustManagerFactory factory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
                    factory.init(sslKeyStore);

                    TrustManager[] trustManagers = factory.getTrustManagers();

                    sslContext.init(null, trustManagers, null);

                    SSLSocketFactory sslSocketFactory = new SSLSocketFactory(sslContext);
                    sslSocketFactory.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);

                    SchemeRegistry sslSchemeRegistry = new SchemeRegistry();
                    sslSchemeRegistry.register(new Scheme(PROTOCOL, HTTPS_PORT, sslSocketFactory));

                    return new MonitoredConnectionManager(clientName, sslSchemeRegistry);
                } catch (Exception ex) {
                    throw new IllegalStateException("SSL configuration issue", ex);
                } finally {
                    if (fin != null) {
                        try {
                            fin.close();
                        } catch (IOException ignore) {
                        }
                    }
                }
            }
        }
    }

    /**
     * Periodic task closing pooled connections idle longer than the configured
     * timeout (seconds). Execution time and failures are reported via Servo
     * monitors; registration failure is logged but non-fatal.
     */
    private class ConnectionCleanerTask implements Runnable {

        private final int connectionIdleTimeout;
        private final BasicTimer executionTimeStats;
        private final Counter cleanupFailed;

        private ConnectionCleanerTask(int connectionIdleTimeout) {
            this.connectionIdleTimeout = connectionIdleTimeout;
            MonitorConfig.Builder monitorConfigBuilder = MonitorConfig.builder("Eureka-Connection-Cleaner-Time");
            executionTimeStats = new BasicTimer(monitorConfigBuilder.build());
            cleanupFailed = new BasicCounter(MonitorConfig.builder("Eureka-Connection-Cleaner-Failure").build());
            try {
                Monitors.registerObject(this);
            } catch (Exception e) {
                s_logger.error("Unable to register with servo.", e);
            }
        }

        @Override
        public void run() {
            Stopwatch start = executionTimeStats.start();
            try {
                apacheHttpClient
                        .getClientHandler()
                        .getHttpClient()
                        .getConnectionManager()
                        .closeIdleConnections(connectionIdleTimeout, TimeUnit.SECONDS);
            } catch (Throwable e) {
                s_logger.error("Cannot clean connections", e);
                cleanupFailed.increment();
            } finally {
                if (null != start) {
                    start.stop();
                }
            }
        }
    }
}
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
 * indicated by the @author tags or express copyright attribution
 * statements applied by the authors. All third-party contributions are
 * distributed under license by Red Hat Middleware LLC.
 *
 * This copyrighted material is made available to anyone wishing to use, modify,
 * copy, or redistribute it subject to the terms and conditions of the GNU
 * Lesser General Public License, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
 * for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this distribution; if not, write to:
 * Free Software Foundation, Inc.
 * 51 Franklin Street, Fifth Floor
 * Boston, MA  02110-1301  USA
 */
package org.hibernate.envers.reader;

import static org.hibernate.envers.tools.ArgumentsTools.checkNotNull;
import static org.hibernate.envers.tools.ArgumentsTools.checkPositive;
import static org.hibernate.envers.tools.Tools.getTargetClassIfProxied;

import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.persistence.NoResultException;

import org.hibernate.HibernateException;
import org.hibernate.NonUniqueResultException;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.envers.configuration.AuditConfiguration;
import org.hibernate.envers.exception.AuditException;
import org.hibernate.envers.exception.NotAuditedException;
import org.hibernate.envers.exception.RevisionDoesNotExistException;
import org.hibernate.envers.query.AuditEntity;
import org.hibernate.envers.query.AuditQueryCreator;
import org.hibernate.envers.synchronization.AuditProcess;
import org.hibernate.event.EventSource;
import org.hibernate.proxy.HibernateProxy;

/**
 * Default {@code AuditReader} implementation: reads historical (audited) entity
 * state and revision metadata through the associated Hibernate {@link Session}.
 * Results of {@code find} are served from / stored into a per-reader
 * {@link FirstLevelCache}. The reader is only usable while the underlying
 * session is open.
 *
 * @author Adam Warski (adam at warski dot org)
 * @author Hern&aacute;n Chanfreau
 */
public class AuditReaderImpl implements AuditReaderImplementor {
    private final AuditConfiguration verCfg;
    private final SessionImplementor sessionImplementor;
    private final Session session;
    // Per-reader cache of (entityName, revision, primaryKey) -> entity instance.
    private final FirstLevelCache firstLevelCache;

    public AuditReaderImpl(AuditConfiguration verCfg, Session session,
                           SessionImplementor sessionImplementor) {
        this.verCfg = verCfg;
        this.sessionImplementor = sessionImplementor;
        this.session = session;

        firstLevelCache = new FirstLevelCache();
    }

    /** Guard used by every public operation: fails fast if the session was closed. */
    private void checkSession() {
        if (!session.isOpen()) {
            throw new IllegalStateException("The associated entity manager is closed!");
        }
    }

    public SessionImplementor getSessionImplementor() {
        return sessionImplementor;
    }

    public Session getSession() {
        return session;
    }

    public FirstLevelCache getFirstLevelCache() {
        return firstLevelCache;
    }

    /** Convenience overload: uses the (unproxied) class name as the entity name. */
    public <T> T find(Class<T> cls, Object primaryKey, Number revision) throws
            IllegalArgumentException, NotAuditedException, IllegalStateException {
        cls = getTargetClassIfProxied(cls);
        return this.find(cls, cls.getName(), primaryKey, revision);
    }

    /**
     * Finds the state of the given entity at the given revision.
     * Served from the first-level cache when possible; otherwise an
     * entities-at-revision query is executed (whose instantiator populates the cache).
     *
     * @return the entity at that revision, or {@code null} if it did not exist then
     * @throws NotAuditedException   if the entity name is not versioned
     * @throws IllegalStateException if the session is closed
     * @throws AuditException        wrapping a non-unique result
     */
    @SuppressWarnings({"unchecked"})
    public <T> T find(Class<T> cls, String entityName, Object primaryKey,
                      Number revision) throws IllegalArgumentException, NotAuditedException, IllegalStateException {
        cls = getTargetClassIfProxied(cls);
        checkNotNull(cls, "Entity class");
        checkNotNull(entityName, "Entity name");
        checkNotNull(primaryKey, "Primary key");
        checkNotNull(revision, "Entity revision");
        checkPositive(revision, "Entity revision");
        checkSession();

        if (!verCfg.getEntCfg().isVersioned(entityName)) {
            throw new NotAuditedException(entityName, entityName + " is not versioned!");
        }

        if (firstLevelCache.contains(entityName, revision, primaryKey)) {
            return (T) firstLevelCache.get(entityName, revision, primaryKey);
        }

        Object result;
        try {
            // The result is put into the cache by the entity instantiator called from the query
            result = createQuery().forEntitiesAtRevision(cls, entityName, revision)
                .add(AuditEntity.id().eq(primaryKey)).getSingleResult();
        } catch (NoResultException e) {
            result = null;
        } catch (NonUniqueResultException e) {
            throw new AuditException(e);
        }

        return (T) result;
    }

    /** Convenience overload: uses the (unproxied) class name as the entity name. */
    public List<Number> getRevisions(Class<?> cls, Object primaryKey)
            throws IllegalArgumentException, NotAuditedException, IllegalStateException {
        cls = getTargetClassIfProxied(cls);
        return this.getRevisions(cls, cls.getName(), primaryKey);
    }

    /**
     * Lists all revision numbers at which the given entity instance was modified.
     *
     * @throws NotAuditedException   if the entity name is not versioned
     * @throws IllegalStateException if the session is closed
     */
    @SuppressWarnings({"unchecked"})
    public List<Number> getRevisions(Class<?> cls, String entityName, Object primaryKey)
            throws IllegalArgumentException, NotAuditedException, IllegalStateException {
        // todo: if a class is not versioned from the beginning, there's a missing ADD rev - what then?
        cls = getTargetClassIfProxied(cls);
        checkNotNull(cls, "Entity class");
        checkNotNull(entityName, "Entity name");
        checkNotNull(primaryKey, "Primary key");
        checkSession();

        if (!verCfg.getEntCfg().isVersioned(entityName)) {
            throw new NotAuditedException(entityName, entityName + " is not versioned!");
        }

        return createQuery().forRevisionsOfEntity(cls, entityName, false, true)
                .addProjection(AuditEntity.revisionNumber())
                .add(AuditEntity.id().eq(primaryKey))
                .getResultList();
    }

    /**
     * Resolves the timestamp of a revision.
     *
     * @throws RevisionDoesNotExistException if no such revision is stored
     * @throws AuditException                wrapping a non-unique result
     */
    public Date getRevisionDate(Number revision) throws IllegalArgumentException, RevisionDoesNotExistException,
            IllegalStateException {
        checkNotNull(revision, "Entity revision");
        checkPositive(revision, "Entity revision");
        checkSession();

        Query query = verCfg.getRevisionInfoQueryCreator().getRevisionDateQuery(session, revision);

        try {
            Object timestampObject = query.uniqueResult();
            if (timestampObject == null) {
                throw new RevisionDoesNotExistException(revision);
            }

            // The timestamp object is either a date or a long
            return timestampObject instanceof Date ? (Date) timestampObject : new Date((Long) timestampObject);
        } catch (NonUniqueResultException e) {
            throw new AuditException(e);
        }
    }

    /**
     * Resolves the revision number that was current at the given date.
     *
     * @throws RevisionDoesNotExistException if no revision existed at that date
     * @throws AuditException                wrapping a non-unique result
     */
    public Number getRevisionNumberForDate(Date date) {
        checkNotNull(date, "Date of revision");
        checkSession();

        Query query = verCfg.getRevisionInfoQueryCreator().getRevisionNumberForDateQuery(session, date);

        try {
            Number res = (Number) query.uniqueResult();
            if (res == null) {
                throw new RevisionDoesNotExistException(date);
            }

            return res;
        } catch (NonUniqueResultException e) {
            throw new AuditException(e);
        }
    }

    /**
     * Loads the revision-info entity for a single revision number.
     *
     * @throws RevisionDoesNotExistException if no such revision is stored
     * @throws AuditException                wrapping a non-unique result
     */
    @SuppressWarnings({"unchecked"})
    public <T> T findRevision(Class<T> revisionEntityClass, Number revision) throws IllegalArgumentException,
            RevisionDoesNotExistException, IllegalStateException {
        checkNotNull(revision, "Entity revision");
        checkPositive(revision, "Entity revision");
        checkSession();

        Set<Number> revisions = new HashSet<Number>(1);
        revisions.add(revision);
        Query query = verCfg.getRevisionInfoQueryCreator().getRevisionsQuery(session, revisions);

        try {
            T revisionData = (T) query.uniqueResult();

            if (revisionData == null) {
                throw new RevisionDoesNotExistException(revision);
            }

            return revisionData;
        } catch (NonUniqueResultException e) {
            throw new AuditException(e);
        }
    }

    /**
     * Loads revision-info entities for a set of revision numbers.
     * Missing revisions are silently absent from the returned map (no exception).
     *
     * @return map keyed by revision number
     * @throws AuditException wrapping any HibernateException from the query
     */
    @SuppressWarnings({"unchecked"})
    public <T> Map<Number, T> findRevisions(Class<T> revisionEntityClass, Set<Number> revisions)
            throws IllegalArgumentException, IllegalStateException {
        Map<Number, T> result = new HashMap<Number, T>(revisions.size());

        for (Number revision : revisions) {
            checkNotNull(revision, "Entity revision");
            checkPositive(revision, "Entity revision");
        }
        checkSession();

        Query query = verCfg.getRevisionInfoQueryCreator().getRevisionsQuery(session, revisions);

        try {
            List<T> revisionList = query.list();
            for (T revision : revisionList) {
                Number revNo = verCfg.getRevisionInfoNumberReader().getRevisionNumber(revision);
                result.put(revNo, revision);
            }

            return result;
        } catch (HibernateException e) {
            throw new AuditException(e);
        }
    }

    /**
     * Returns the revision-info entity for the revision currently being created
     * in this transaction, optionally persisting it immediately.
     *
     * @param persist whether to persist the revision data if not yet persisted
     * @throws IllegalArgumentException if the session is not an {@link EventSource}
     */
    @SuppressWarnings({"unchecked"})
    public <T> T getCurrentRevision(Class<T> revisionEntityClass, boolean persist) {
        if (!(session instanceof EventSource)) {
            throw new IllegalArgumentException("The provided session is not an EventSource!");
        }

        // Obtaining the current audit sync
        AuditProcess auditProcess = verCfg.getSyncManager().get((EventSource) session);

        // And getting the current revision data
        return (T) auditProcess.getCurrentRevisionData(session, persist);
    }

    /** Entry point for building audit queries bound to this reader. */
    public AuditQueryCreator createQuery() {
        return new AuditQueryCreator(verCfg, this);
    }

    /** Convenience overload: checks auditing by (unproxied) class name. */
    public boolean isEntityClassAudited(Class<?> entityClass) {
        entityClass = getTargetClassIfProxied(entityClass);
        return this.isEntityNameAudited(entityClass.getName());
    }

    /** @return whether the given entity name is configured as versioned (audited) */
    public boolean isEntityNameAudited(String entityName) {
        checkNotNull(entityName, "Entity name");
        checkSession();
        return (verCfg.getEntCfg().isVersioned(entityName));
    }

    /**
     * Resolves the entity name of a historic entity instance previously loaded by
     * this reader (looked up in the envers first-level cache only).
     *
     * @throws HibernateException if the instance is not present in this reader's cache
     */
    public String getEntityName(Object primaryKey, Number revision, Object entity) throws HibernateException {
        checkNotNull(primaryKey, "Primary key");
        checkNotNull(revision, "Entity revision");
        checkPositive(revision, "Entity revision");
        checkNotNull(entity, "Entity");
        checkSession();

        // Unwrap if necessary
        if (entity instanceof HibernateProxy) {
            entity = ((HibernateProxy) entity).getHibernateLazyInitializer().getImplementation();
        }
        if (firstLevelCache.containsEntityName(primaryKey, revision, entity)) {
            // it's on envers FLC!
            return firstLevelCache.getFromEntityNameCache(primaryKey, revision, entity);
        } else {
            throw new HibernateException(
                    "Envers can't resolve entityName for historic entity. The id, revision and entity is not on envers first level cache.");
        }
    }
}
package php.runtime.memory.support;

import php.runtime.Memory;
import php.runtime.common.HintType;
import php.runtime.env.Environment;
import php.runtime.env.TraceInfo;
import php.runtime.lang.spl.ArrayAccess;
import php.runtime.memory.*;
import php.runtime.memory.helper.ShortcutMemory;

import java.util.*;

/**
 * Conversion utilities between JPHP {@link Memory} values and plain Java values,
 * plus helpers used by list-assignment (destructuring) code paths.
 *
 * Two lookup tables drive the conversions:
 *  - {@link #UNCONVERTERS}: Java type -> Memory (boxing a Java value into a Memory).
 *  - {@link #CONVERTERS}:   Memory -> Java type (unboxing a Memory into a Java value).
 * Primitive TYPE keys (e.g. {@code Double.TYPE}) reuse the wrapper class's entry.
 */
public class MemoryUtils {
    // Java value -> Memory. Note: Float is widened to DoubleMemory; all integral
    // types funnel through LongMemory.valueOf (which caches small values).
    protected final static Map<Class<?>, Unconverter> UNCONVERTERS = new HashMap<Class<?>, Unconverter>(){{
        put(Double.class, new Unconverter<Double>() {
            @Override
            public Memory run(Double value) {
                return new DoubleMemory(value);
            }
        });
        put(Double.TYPE, get(Double.class));

        put(Float.class, new Unconverter<Float>() {
            @Override
            public Memory run(Float value) {
                return new DoubleMemory(value);
            }
        });
        put(Float.TYPE, get(Float.class));

        put(Long.class, new Unconverter<Long>() {
            @Override
            public Memory run(Long value) {
                return LongMemory.valueOf(value);
            }
        });
        put(Long.TYPE, get(Long.class));

        put(Integer.class, new Unconverter<Integer>() {
            @Override
            public Memory run(Integer value) {
                return LongMemory.valueOf(value);
            }
        });
        put(Integer.TYPE, get(Integer.class));

        put(Short.class, new Unconverter<Short>() {
            @Override
            public Memory run(Short value) {
                return LongMemory.valueOf(value);
            }
        });
        put(Short.TYPE, get(Short.class));

        put(Byte.class, new Unconverter<Byte>() {
            @Override
            public Memory run(Byte value) {
                return LongMemory.valueOf(value);
            }
        });
        put(Byte.TYPE, get(Byte.class));

        // A single char becomes a one-character PHP string.
        put(Character.class, new Unconverter<Character>() {
            @Override
            public Memory run(Character value) {
                return new StringMemory(value);
            }
        });
        put(Character.TYPE, get(Character.class));

        put(Boolean.class, new Unconverter<Boolean>() {
            @Override
            public Memory run(Boolean value) {
                return value ? Memory.TRUE : Memory.FALSE;
            }
        });
        put(Boolean.TYPE, get(Boolean.class));

        put(String.class, new Unconverter<String>() {
            @Override
            public Memory run(String value) {
                return new StringMemory(value);
            }
        });

        // Memory passes through unchanged.
        put(Memory.class, new Unconverter<Memory>() {
            @Override
            public Memory run(Memory value) {
                return value;
            }
        });

        put(Memory[].class, new Unconverter<Memory[]>() {
            @Override
            public Memory run(Memory[] value) {
                return new ArrayMemory(false, value);
            }
        });
    }};

    // Memory -> Java value. Narrowing casts ((int), (short), (byte), (float)) follow
    // plain Java truncation semantics on the underlying long/double.
    protected final static Map<Class<?>, Converter> CONVERTERS = new HashMap<Class<?>, Converter>(){{
        // double
        put(Double.class, new Converter<Double>() {
            @Override
            public Double run(Environment env, TraceInfo trace, Memory value) {
                return value.toDouble();
            }
        });
        put(Double.TYPE, get(Double.class));

        // float
        put(Float.class, new Converter<Float>() {
            @Override
            public Float run(Environment env, TraceInfo trace, Memory value) {
                return (float)value.toDouble();
            }
        });
        put(Float.TYPE, get(Float.class));

        // long
        put(Long.class, new Converter<Long>() {
            @Override
            public Long run(Environment env, TraceInfo trace, Memory value) {
                return value.toLong();
            }
        });
        put(Long.TYPE, get(Long.class));

        // int
        put(Integer.class, new Converter<Integer>() {
            @Override
            public Integer run(Environment env, TraceInfo trace, Memory value) {
                return (int)value.toLong();
            }
        });
        put(Integer.TYPE, get(Integer.class));

        // short
        put(Short.class, new Converter<Short>() {
            @Override
            public Short run(Environment env, TraceInfo trace, Memory value) {
                return (short)value.toLong();
            }
        });
        put(Short.TYPE, get(Short.class));

        // byte
        put(Byte.class, new Converter<Byte>() {
            @Override
            public Byte run(Environment env, TraceInfo trace, Memory value) {
                return (byte)value.toLong();
            }
        });
        put(Byte.TYPE, get(Byte.class));

        // char
        put(Character.class, new Converter<Character>() {
            @Override
            public Character run(Environment env, TraceInfo trace, Memory value) {
                return value.toChar();
            }
        });
        put(Character.TYPE, get(Character.class));

        // bool
        put(Boolean.class, new Converter<Boolean>() {
            @Override
            public Boolean run(Environment env, TraceInfo trace, Memory value) {
                return value.toBoolean();
            }
        });
        put(Boolean.TYPE, get(Boolean.class));

        // string
        put(String.class, new Converter<String>() {
            @Override
            public String run(Environment env, TraceInfo trace, Memory value) {
                return value.toString();
            }
        });

        put(Memory.class, new Converter<Memory>() {
            @Override
            public Memory run(Environment env, TraceInfo trace, Memory value) {
                return value;
            }
        });

        // Memory[]: only array Memory converts; each element is made immutable.
        // Non-array input yields null (not an empty array).
        put(Memory[].class, new Converter<Memory[]>() {
            @Override
            public Memory[] run(Environment env, TraceInfo trace, Memory value) {
                if (value.isArray()){
                    List<Memory> result = new ArrayList<Memory>();
                    for(Memory one : (ArrayMemory)value){
                        result.add(one.fast_toImmutable());
                    }

                    return result.toArray(new Memory[]{});
                } else {
                    return null;
                }
            }
        });
    }};

    /** Returns the Memory->Java converter for {@code type}, or null if unsupported. */
    public static Converter<?> getConverter(Class<?> type){
        return CONVERTERS.get(type);
    }

    /**
     * Looks up a converter for each type in order. Entries for unsupported types are
     * null in the returned array.
     */
    public static Converter<?>[] getConverters(Class<?>[] types){
        Converter<?>[] result = new Converter[types.length];
        for(int i = 0; i < types.length; i++){
            result[i] = getConverter(types[i]);
        }
        return result;
    }

    /** Returns the Java->Memory unconverter for {@code type}, or null if unsupported. */
    public static Unconverter getUnconverter(Class<?> type){
        return UNCONVERTERS.get(type);
    }

    /**
     * Converts {@code value} to {@code type} via the converter table.
     * NOTE(review): when no converter exists, the Memory itself is returned uncast —
     * callers presumably only pass supported types or Memory-compatible targets; verify.
     */
    public static Object fromMemory(Memory value, Class<?> type){
        Converter converter = getConverter(type);
        if (converter != null)
            return converter.run(value);
        else
            return value;
    }

    /**
     * Legacy Memory->Java conversion by explicit type checks.
     * @deprecated superseded by {@link #fromMemory(Memory, Class)} / the converter table.
     *             Note it uses {@code toImmutable()} per element for Memory[], whereas the
     *             table converter uses {@code fast_toImmutable()}.
     */
    @Deprecated
    public static Object toValue(Memory value, Class<?> type){
        if (type == Double.TYPE || type == Double.class)
            return value.toDouble();
        if (type == Float.TYPE || type == Float.class)
            return (float)value.toDouble();
        if (type == Long.TYPE || type == Long.class)
            return value.toLong();
        if (type == Integer.TYPE || type == Integer.class)
            return (int)value.toLong();
        if (type == Short.TYPE || type == Short.class)
            return (short)value.toLong();
        if (type == Byte.TYPE || type == Byte.class)
            return (byte)value.toLong();
        if (type == Character.TYPE || type == Character.class)
            return value.toChar();
        if (type == String.class)
            return value.toString();
        if (type == Boolean.TYPE || type == Boolean.class)
            return value.toBoolean();
        if (type == Memory.class)
            return value;
        if (type == Memory[].class){
            if (value.isArray()){
                List<Memory> result = new ArrayList<Memory>();
                for(Memory one : (ArrayMemory)value){
                    result.add(one.toImmutable());
                }
                return result.toArray(new Memory[]{});
            } else {
                return null;
            }
        }
        throw new IllegalArgumentException("Unexpected class type: " + type.getName());
    }

    /** @deprecated use {@link #valueOf(Environment, Object)}. */
    @Deprecated
    public static Memory valueOf(Object value){
        return valueOf(null, value);
    }

    /**
     * Legacy Java->Memory boxing with recursive handling of collections, maps and arrays.
     * @deprecated legacy path.
     */
    @Deprecated
    public static Memory valueOf(Environment env, Object value){
        if (value == null)
            return Memory.NULL;
        Unconverter unconverter = getUnconverter(value.getClass());
        if (unconverter != null) {
            return unconverter.run(value);
        } else {
            if (value instanceof Memory)
                return (Memory)value;

            // NOTE(review): the collection/map/array branches below run only when
            // env == null, and the final `return null` runs unconditionally after them.
            // Combined with the stray `//}` left behind, this looks like a brace was
            // removed in an earlier edit — confirm whether the guard is intentional.
            if (env == null)
                if (value instanceof Collection){
                    ArrayMemory result = new ArrayMemory();
                    for (Object el : (Collection)value)
                        result.add(valueOf(el));
                    return result;
                } else if (value instanceof Map){
                    ArrayMemory result = new ArrayMemory();
                    for (Map.Entry el : ((Map<?, ?>)value).entrySet())
                        result.refOfIndex(valueOf(el.getKey())).assign(valueOf(el.getValue()));
                    return result;
                } else if (value.getClass().isArray()){
                    ArrayMemory result = new ArrayMemory();
                    for (Object el : (Object[])value)
                        result.add(valueOf(el));
                    return result;
                }

            return null;
            //}
        }
    }

    /**
     * Parses a string literal into a Memory according to a hint type
     * (e.g. default values in signatures).
     * @deprecated legacy path.
     */
    @Deprecated
    public static Memory valueOf(String value, HintType type){
        switch (type){
            case STRING:
                return new StringMemory(value);
            case ANY:
                // "false"/"true"/"null" keywords first, then numeric detection,
                // falling back to a plain string.
                if (value.equals("false"))
                    return Memory.FALSE;
                if (value.equals("true"))
                    return Memory.TRUE;
                else if (value.equalsIgnoreCase("null"))
                    return Memory.NULL;

                Memory m = StringMemory.toNumeric(value, false, null);
                return m != null ? m : new StringMemory(value);
            case DOUBLE:
                return new DoubleMemory(Double.parseDouble(value));
            case INT: {
                return LongMemory.valueOf(Long.parseLong(value));
            }
            case ARRAY:
                return new ArrayMemory();
            case BOOLEAN:
                return new StringMemory(value).toBoolean() ? Memory.TRUE : Memory.FALSE;
            case CALLABLE:
                return new StringMemory(value);
            default:
                throw new IllegalArgumentException("Unsupported type - " + type);
        }
    }

    /**
     * list()-assignment helper: reads index {@code index} from an array or ArrayAccess
     * object; anything else yields NULL.
     */
    public static Memory valueForList(Memory memory, TraceInfo traceInfo, long index) {
        if (memory.isArray() || memory.instanceOf(ArrayAccess.class)) {
            Memory valueOfIndex = memory.valueOfIndex(traceInfo, index);
            return valueOfIndex;
        } else {
            return Memory.NULL;
        }
    }

    public static Memory valueForList(Memory memory, long index) {
        return valueForList(memory, null, index);
    }

    public static Memory refValueForList(Memory memory, TraceInfo traceInfo, long index) {
        return refValueForList(memory, traceInfo, false, index);
    }

    /**
     * list()-assignment helper returning a reference to index {@code index}.
     * For an undefined index: when {@code inner} is true an empty array is assigned by
     * reference (re-read through valueOfIndex for ArrayAccess objects); otherwise the
     * slot is initialized to NULL.
     */
    public static Memory refValueForList(Memory memory, TraceInfo traceInfo, boolean inner, long index) {
        if (memory.isArray() || memory.instanceOf(ArrayAccess.class)) {
            Memory refOfIndex = memory.refOfIndex(traceInfo, index);

            if (refOfIndex.isUndefined()) {
                if (inner) {
                    ArrayMemory arrayMemory = new ArrayMemory();
                    refOfIndex.assignRef(arrayMemory);

                    if (memory.instanceOf(ArrayAccess.class)) {
                        refOfIndex = memory.valueOfIndex(traceInfo, index);
                    }
                } else {
                    refOfIndex.assign(Memory.NULL);
                }
            }

            return refOfIndex;
        } else {
            return Memory.NULL;
        }
    }

    /** Same as the long-index overload, for string keys. */
    public static Memory valueForList(Memory memory, TraceInfo traceInfo, String index) {
        if (memory.isArray() || memory.instanceOf(ArrayAccess.class)) {
            Memory valueOfIndex = memory.valueOfIndex(traceInfo, index);
            return valueOfIndex;
        } else {
            return Memory.NULL;
        }
    }

    public static Memory valueForList(Memory memory, String index) {
        return valueForList(memory, null, index);
    }

    public static Memory refValueForList(Memory memory, TraceInfo traceInfo, String index) {
        return refValueForList(memory, traceInfo, false, index);
    }

    /** Same as the long-index overload, for string keys. */
    public static Memory refValueForList(Memory memory, TraceInfo traceInfo, boolean inner, String index) {
        if (memory.isArray() || memory.instanceOf(ArrayAccess.class)) {
            Memory refOfIndex = memory.refOfIndex(traceInfo, index);

            if (refOfIndex.isUndefined()) {
                if (inner) {
                    ArrayMemory arrayMemory = new ArrayMemory();
                    refOfIndex.assignRef(arrayMemory);

                    if (memory.instanceOf(ArrayAccess.class)) {
                        refOfIndex = memory.valueOfIndex(traceInfo, index);
                    }
                } else {
                    refOfIndex.assign(Memory.NULL);
                }
            }

            return refOfIndex;
        } else {
            return Memory.NULL;
        }
    }

    /** Same as the long-index overload, for Memory keys. */
    public static Memory valueForList(Memory memory, TraceInfo traceInfo, Memory index) {
        if (memory.isArray() || memory.instanceOf(ArrayAccess.class)) {
            Memory valueOfIndex = memory.valueOfIndex(traceInfo, index);
            return valueOfIndex;
        } else {
            return Memory.NULL;
        }
    }

    public static Memory valueForList(Memory memory, Memory index) {
        return valueForList(memory, null, index);
    }

    public static Memory refValueForList(Memory memory, TraceInfo traceInfo, Memory index) {
        return refValueForList(memory, traceInfo, false, index);
    }

    /** Same as the long-index overload, for Memory keys. */
    public static Memory refValueForList(Memory memory, TraceInfo traceInfo, boolean inner, Memory index) {
        if (memory.isArray() || memory.instanceOf(ArrayAccess.class)) {
            Memory refOfIndex = memory.refOfIndex(traceInfo, index);

            if (refOfIndex.isUndefined()) {
                if (inner) {
                    ArrayMemory arrayMemory = new ArrayMemory();
                    refOfIndex.assignRef(arrayMemory);

                    if (memory.instanceOf(ArrayAccess.class)) {
                        refOfIndex = memory.valueOfIndex(traceInfo, index);
                    }
                } else {
                    refOfIndex.assign(Memory.NULL);
                }
            }

            return refOfIndex;
        } else {
            return Memory.NULL;
        }
    }

    /**
     * Memory -> T conversion strategy. The env/trace-less {@link #run(Memory)} overload
     * delegates with nulls for callers outside an execution context.
     */
    abstract public static class Converter<T> {
        abstract public T run(Environment env, TraceInfo trace, Memory value);

        final public T run(Memory value) {
            return run(null, null, value);
        }
    }

    /** Java value -> Memory conversion strategy. */
    public static interface Unconverter<T> {
        Memory run(T value);
    }
}
/* * Copyright (c) 2004-2022, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */
package org.hisp.dhis.period;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.util.GregorianCalendar;
import java.util.List;

import org.hisp.dhis.calendar.DateTimeUnit;
import org.joda.time.DateTime;
import org.joda.time.DateTimeConstants;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

/**
 * Tests for the weekly period types (ISO Monday weeks plus the
 * Wednesday/Thursday/Saturday/Sunday-starting variants).
 *
 * @author Bob Jolliffe
 */
class WeeklyPeriodTypeTest
{
    WeeklyAbstractPeriodType periodType;

    @BeforeEach
    void init()
    {
        // Default fixture is the standard (Monday-based) weekly type; several tests
        // reassign it to a variant.
        periodType = new WeeklyPeriodType();
    }

    // A period created from a Monday must span exactly Monday..Sunday of that week.
    @Test
    void testCreatePeriod()
    {
        DateTime testDate = new DateTime( 2009, 4, 27, 0, 0 );
        WeeklyPeriodType wpt = new WeeklyPeriodType();
        Period p = wpt.createPeriod( testDate.toDate() );
        DateTime startDate = new DateTime( 2009, 4, 27, 0, 0 );
        DateTime endDate = new DateTime( 2009, 5, 3, 0, 0 );
        assertFalse( startDate.isAfter( p.getStartDate().getTime() ), "start date after given date" );
        assertFalse( endDate.isAfter( p.getEndDate().getTime() ), "end date before given date" );
        assertTrue( startDate.getDayOfWeek() == DateTimeConstants.MONDAY );
        assertTrue( endDate.getDayOfWeek() == DateTimeConstants.SUNDAY );
    }

    // Parsing ISO week strings must land on the correct Monday (incl. weeks that
    // start in the previous calendar year, e.g. 2009W1 begins 2008-12-29).
    @Test
    void isoDates()
    {
        DateTime testDate = new DateTime( 2008, 12, 29, 0, 0 );
        Period period = periodType.createPeriod( "2009W1" );
        assertEquals( testDate.toDate(), period.getStartDate() );

        testDate = new DateTime( 2011, 1, 3, 0, 0 );
        period = periodType.createPeriod( "2011W1" );
        assertEquals( testDate.toDate(), period.getStartDate() );

        testDate = new DateTime( 2011, 3, 14, 0, 0 );
        period = periodType.createPeriod( "2011W11" );
        assertEquals( testDate.toDate(), period.getStartDate() );
    }

    // Formatting a period back to its ISO string, including year rollover
    // (2012-12-31 belongs to ISO week 2013W1).
    @Test
    void getIsoDate()
    {
        DateTime testDate = new DateTime( 2011, 1, 3, 0, 0 );
        Period p = periodType.createPeriod( testDate.toDate() );
        assertEquals( "2011W1", p.getIsoDate() );

        // Monday
        testDate = new DateTime( 2012, 12, 31, 0, 0 );
        p = periodType.createPeriod( testDate.toDate() );
        assertEquals( "2013W1", p.getIsoDate() );
    }

    // How many weekly periods fit inside one period of each other type.
    @Test
    void testGetPeriodsBetween()
    {
        assertEquals( 1, periodType.createPeriod().getPeriodSpan( periodType ) );
        assertEquals( 1, new WeeklyWednesdayPeriodType().createPeriod().getPeriodSpan( periodType ) );
        assertEquals( 1, new WeeklyThursdayPeriodType().createPeriod().getPeriodSpan( periodType ) );
        assertEquals( 1, new WeeklySaturdayPeriodType().createPeriod().getPeriodSpan( periodType ) );
        assertEquals( 1, new WeeklySundayPeriodType().createPeriod().getPeriodSpan( periodType ) );
        assertEquals( 4, new MonthlyPeriodType().createPeriod().getPeriodSpan( periodType ) );
        assertEquals( 8, new BiMonthlyPeriodType().createPeriod().getPeriodSpan( periodType ) );
        assertEquals( 13, new QuarterlyPeriodType().createPeriod().getPeriodSpan( periodType ) );
        assertEquals( 26, new SixMonthlyPeriodType().createPeriod().getPeriodSpan( periodType ) );
        assertEquals( 52, new YearlyPeriodType().createPeriod().getPeriodSpan( periodType ) );
    }

    // 2009 is a 53-week ISO year, 2011 a 52-week year.
    @Test
    void testGeneratePeriodsWithCalendar()
    {
        List<Period> periods = periodType.generatePeriods( new GregorianCalendar( 2009, 0, 1 ).getTime() );
        assertEquals( 53, periods.size() );
        periods = periodType.generatePeriods( new GregorianCalendar( 2011, 0, 3 ).getTime() );
        assertEquals( 52, periods.size() );
    }

    @Test
    void testGetIsoDate()
    {
        DateTime testDate = new DateTime( 2012, 12, 31, 0, 0 );
        assertEquals( "2013W1",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2012, 12, 30, 0, 0 );
        assertEquals( "2012W52",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2009, 12, 29, 0, 0 );
        assertEquals( "2009W53",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2010, 1, 4, 0, 0 );
        assertEquals( "2010W1",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
    }

    // Wednesday-based weeks: the week boundary falls between Tue May 2 and Wed May 3 2017.
    @Test
    void testWeeklyWednesday()
    {
        periodType = new WeeklyWednesdayPeriodType();
        DateTime testDate = new DateTime( 2017, 5, 4, 0, 0 );
        assertEquals( "2017WedW18",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 3, 0, 0 );
        assertEquals( "2017WedW18",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 4, 0, 0 );
        assertEquals( "2017WedW18",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 2, 0, 0 );
        assertEquals( "2017WedW17",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 1, 0, 0 );
        assertEquals( "2017WedW17",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        Period period = periodType.createPeriod( "2017WedW17" );
        assertNotNull( period );
        assertEquals( "2017WedW17", periodType.getIsoDate( period ) );
        period = periodType.createPeriod( "2017WedW18" );
        assertNotNull( period );
        assertEquals( "2017WedW18", periodType.getIsoDate( period ) );
    }

    // Thursday-based weeks: boundary between Wed May 3 and Thu May 4 2017.
    @Test
    void testWeeklyThursday()
    {
        periodType = new WeeklyThursdayPeriodType();
        DateTime testDate = new DateTime( 2017, 5, 7, 0, 0 );
        assertEquals( "2017ThuW19",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 6, 0, 0 );
        assertEquals( "2017ThuW19",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 5, 0, 0 );
        assertEquals( "2017ThuW19",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 4, 0, 0 );
        assertEquals( "2017ThuW19",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 3, 0, 0 );
        assertEquals( "2017ThuW18",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 2, 0, 0 );
        assertEquals( "2017ThuW18",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 1, 0, 0 );
        assertEquals( "2017ThuW18",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        Period period = periodType.createPeriod( "2017ThuW17" );
        assertNotNull( period );
        assertEquals( "2017ThuW17", periodType.getIsoDate( period ) );
        period = periodType.createPeriod( "2017ThuW18" );
        assertNotNull( period );
        assertEquals( "2017ThuW18", periodType.getIsoDate( period ) );
    }

    // Saturday-based weeks: boundary between Fri May 5 and Sat May 6 2017.
    @Test
    void testWeeklySaturday()
    {
        periodType = new WeeklySaturdayPeriodType();
        DateTime testDate = new DateTime( 2017, 5, 7, 0, 0 );
        assertEquals( "2017SatW19",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 6, 0, 0 );
        assertEquals( "2017SatW19",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 5, 0, 0 );
        assertEquals( "2017SatW18",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 4, 0, 0 );
        assertEquals( "2017SatW18",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 3, 0, 0 );
        assertEquals( "2017SatW18",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 2, 0, 0 );
        assertEquals( "2017SatW18",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 1, 0, 0 );
        assertEquals( "2017SatW18",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        Period period = periodType.createPeriod( "2017SatW17" );
        assertNotNull( period );
        assertEquals( "2017SatW17", periodType.getIsoDate( period ) );
        period = periodType.createPeriod( "2017SatW18" );
        assertNotNull( period );
        assertEquals( "2017SatW18", periodType.getIsoDate( period ) );
    }

    // Sunday-based weeks: boundary between Sat May 6 and Sun May 7 2017.
    @Test
    void testWeeklySunday()
    {
        periodType = new WeeklySundayPeriodType();
        DateTime testDate = new DateTime( 2017, 5, 7, 0, 0 );
        assertEquals( "2017SunW19",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 6, 0, 0 );
        assertEquals( "2017SunW18",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 5, 0, 0 );
        assertEquals( "2017SunW18",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 4, 0, 0 );
        assertEquals( "2017SunW18",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 3, 0, 0 );
        assertEquals( "2017SunW18",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 2, 0, 0 );
        assertEquals( "2017SunW18",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 5, 1, 0, 0 );
        assertEquals( "2017SunW18",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 1, 8, 0, 0 );
        assertEquals( "2017SunW2",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        testDate = new DateTime( 2017, 1, 1, 0, 0 );
        assertEquals( "2017SunW1",
            periodType.getIsoDate( new Period( periodType, testDate.toDate(), testDate.toDate() ) ) );
        Period period = periodType.createPeriod( "2017SunW17" );
        assertNotNull( period );
        assertEquals( "2017SunW17", periodType.getIsoDate( period ) );
        period = periodType.createPeriod( "2017SunW18" );
        assertNotNull( period );
        assertEquals( "2017SunW18", periodType.getIsoDate( period ) );
    }

    // Period generation around ISO year boundaries: years whose first week starts in
    // the previous calendar year yield a mixed-start-year list (periodsInYear false).
    @Test
    void testGenerateWeeklyPeriodWithinAYear()
    {
        periodType = new WeeklyPeriodType();
        List<Period> periods = periodType.generatePeriods( new DateTimeUnit( 2019, 4, 1 ) );
        assertEquals( 52, periods.size() );
        assertFalse( periodsInYear( periods, 2018 ) );
        assertFalse( periodsInYear( periods, 2019 ) );
        assertFalse( periodsInYear( periods, 2020 ) );
        assertEquals( 2018, DateTimeUnit.fromJdkDate( periods.get( 0 ).getStartDate() ).getYear() );
        assertEquals( 2019, DateTimeUnit.fromJdkDate( periods.get( 1 ).getStartDate() ).getYear() );
        periods = periodType.generatePeriods( new DateTimeUnit( 2018, 1, 1 ) );
        assertEquals( 52, periods.size() );
        assertFalse( periodsInYear( periods, 2017 ) );
        assertTrue( periodsInYear( periods, 2018 ) );
        assertFalse( periodsInYear( periods, 2019 ) );
        periods = periodType.generatePeriods( new DateTimeUnit( 2015, 4, 1 ) );
        assertEquals( 53, periods.size() );
        assertFalse( periodsInYear( periods, 2014 ) );
        assertFalse( periodsInYear( periods, 2015 ) );
        assertFalse( periodsInYear( periods, 2016 ) );
        assertEquals( 2014, DateTimeUnit.fromJdkDate( periods.get( 0 ).getStartDate() ).getYear() );
        assertEquals( 2015, DateTimeUnit.fromJdkDate( periods.get( 1 ).getStartDate() ).getYear() );
        periods = periodType.generatePeriods( new DateTimeUnit( 1990, 1, 1 ) );
        assertEquals( 52, periods.size() );
        assertFalse( periodsInYear( periods, 1989 ) );
        assertTrue( periodsInYear( periods, 1990 ) );
        assertFalse( periodsInYear( periods, 1991 ) );
        assertEquals( 1990, DateTimeUnit.fromJdkDate( periods.get( 0 ).getStartDate() ).getYear() );
        periods = periodType.generatePeriods( new DateTimeUnit( 1981, 1, 1 ) );
        assertEquals( 53, periods.size() );
        assertFalse( periodsInYear( periods, 1980 ) );
        assertFalse( periodsInYear( periods, 1981 ) );
        assertFalse( periodsInYear( periods, 1982 ) );
        assertEquals( 1980, DateTimeUnit.fromJdkDate( periods.get( 0 ).getStartDate() ).getYear() );
        assertEquals( 1981, DateTimeUnit.fromJdkDate( periods.get( 1 ).getStartDate() ).getYear() );
        periods = periodType.generatePeriods( new DateTimeUnit( 1980, 12, 29 ) );
        assertEquals( 52, periods.size() );
        // NOTE(review): the next assertion is duplicated verbatim — one occurrence was
        // presumably meant to check a different year (1979?); verify the intent.
        assertFalse( periodsInYear( periods, 1980 ) );
        assertFalse( periodsInYear( periods, 1980 ) );
        assertFalse( periodsInYear( periods, 1981 ) );
        assertEquals( 1979, DateTimeUnit.fromJdkDate( periods.get( 0 ).getStartDate() ).getYear() );
        assertEquals( 1980, DateTimeUnit.fromJdkDate( periods.get( 1 ).getStartDate() ).getYear() );
    }

    /**
     * Returns true only if EVERY period in the list starts in the given calendar year
     * (despite the name suggesting "any").
     */
    private boolean periodsInYear( List<Period> periods, int year )
    {
        for ( Period period : periods )
        {
            DateTimeUnit start = DateTimeUnit.fromJdkDate( period.getStartDate() );
            if ( start.getYear() != year )
            {
                return false;
            }
        }
        return true;
    }

    // Rewinding by N weekly periods (negative N moves forward).
    @Test
    void testGetRewindedDate()
    {
        assertEquals( new DateTime( 2020, 1, 3, 0, 0 ).toDate(),
            periodType.getRewindedDate( new DateTime( 2020, 1, 24, 0, 0 ).toDate(), 3 ) );
        assertEquals( new DateTime( 2020, 1, 15, 0, 0 ).toDate(),
            periodType.getRewindedDate( new DateTime( 2020, 1, 1, 0, 0 ).toDate(), -2 ) );
    }
}
package org.apache.maven.report.projectinfo; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import org.apache.maven.doxia.sink.Sink; import org.apache.maven.model.DistributionManagement; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.project.MavenProject; import org.codehaus.plexus.i18n.I18N; import org.codehaus.plexus.util.StringUtils; import java.util.Locale; /** * Generates the Project Distribution Management report. 
* * @author <a href="mailto:vincent.siveton@gmail.com">Vincent Siveton </a> * @version $Id$ * @since 2.3 */ @Mojo( name = "distribution-management" ) public class DistributionManagementReport extends AbstractProjectInfoReport { // ---------------------------------------------------------------------- // Public methods // ---------------------------------------------------------------------- @Override public boolean canGenerateReport() { boolean result = super.canGenerateReport(); if ( result && skipEmptyReport ) { result = getProject().getDistributionManagement() != null; } return result; } @Override public void executeReport( Locale locale ) { DistributionManagementRenderer r = new DistributionManagementRenderer( getSink(), getProject(), getI18N( locale ), locale ); r.render(); } /** {@inheritDoc} */ public String getOutputName() { return "distribution-management"; } @Override protected String getI18Nsection() { return "distributionManagement"; } // ---------------------------------------------------------------------- // Private // ---------------------------------------------------------------------- /** * Internal renderer class */ private static class DistributionManagementRenderer extends AbstractProjectInfoRenderer { private final MavenProject project; DistributionManagementRenderer( Sink sink, MavenProject project, I18N i18n, Locale locale ) { super( sink, i18n, locale ); this.project = project; } @Override protected String getI18Nsection() { return "distributionManagement"; } @Override public void renderBody() { DistributionManagement distributionManagement = project.getDistributionManagement(); if ( distributionManagement == null ) { startSection( getI18nString( "overview.title" ) ); paragraph( getI18nString( "nodistributionmanagement" ) ); endSection(); return; } startSection( getI18nString( "overview.title" ) ); paragraph( getI18nString( "overview.intro" ) ); if ( StringUtils.isNotEmpty( distributionManagement.getDownloadUrl() ) ) { startSection( 
getI18nString( "downloadURL" ) ); internalLink( distributionManagement.getDownloadUrl() ); endSection(); } if ( distributionManagement.getRelocation() != null ) { startSection( getI18nString( "relocation" ) ); startTable(); tableHeader( new String[] { getI18nString( "field" ), getI18nString( "value" ) } ); tableRow( new String[] { getI18nString( "relocation.groupid" ), distributionManagement.getRelocation().getGroupId() } ); tableRow( new String[] { getI18nString( "relocation.artifactid" ), distributionManagement.getRelocation().getArtifactId() } ); tableRow( new String[] { getI18nString( "relocation.version" ), distributionManagement.getRelocation().getVersion() } ); tableRow( new String[] { getI18nString( "relocation.message" ), distributionManagement.getRelocation().getMessage() } ); endTable(); endSection(); } if ( distributionManagement.getRepository() != null && StringUtils.isNotEmpty( distributionManagement.getRepository().getUrl() ) ) { startSection( getI18nString( "repository" ) + getRepoName( distributionManagement.getRepository().getId() ) ); internalLink( distributionManagement.getRepository().getUrl() ); endSection(); } if ( distributionManagement.getSnapshotRepository() != null && StringUtils.isNotEmpty( distributionManagement.getSnapshotRepository().getUrl() ) ) { startSection( getI18nString( "snapshotRepository" ) + getRepoName( distributionManagement.getSnapshotRepository().getId() ) ); internalLink( distributionManagement.getSnapshotRepository().getUrl() ); endSection(); } if ( distributionManagement.getSite() != null && StringUtils.isNotEmpty( distributionManagement.getSite().getUrl() ) ) { startSection( getI18nString( "site" ) + getRepoName( distributionManagement.getSite().getId() ) ); internalLink( distributionManagement.getSite().getUrl() ); endSection(); } endSection(); } private void internalLink( String url ) { if ( StringUtils.isEmpty( url ) ) { return; } String urlLowerCase = url.trim().toLowerCase( Locale.ENGLISH ); if ( 
urlLowerCase.startsWith( "http" ) || urlLowerCase.startsWith( "https" ) || urlLowerCase.startsWith( "ftp" ) ) { link( url, url ); } else { paragraph( url ); } } private String getRepoName( String name ) { if ( StringUtils.isNotEmpty( name ) ) { return " - " + name; } return ""; } } }
package com.planet_ink.coffee_mud.Libraries;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;

import java.io.IOException;
import java.util.*;

/*
Copyright 2000-2010 Bo Zimmerman

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Parses natural-language "geas"/slavery commands given to a MOB into
 * executable instruction steps, by matching the words against the
 * pattern table {@code pmap} (declared in {@link SlaveryLibrary}).
 */
@SuppressWarnings("unchecked")
public class SlaveryParser extends StdLibrary implements SlaveryLibrary
{
	public String ID(){return "SlaveryParser";}

	public boolean tick(Tickable ticking, int tickID)
	{
		// this library needs no periodic work
		return true;
	}

	// Lazily-built cache: pmap[p][0] pre-parsed into a String[] of pattern tokens.
	public Object[] fpmap=null;

	/**
	 * Matches the given word list against every pattern in pmap.
	 *
	 * @param mob the MOB who will perform the command (used to resolve command words)
	 * @param prereq the already-cleaned words of the request
	 * @return a Vector of Hashtables; each maps wildcard codes (%s, %k, %m, ...)
	 *         to the matched text and "INSTR" to the pattern's instruction string
	 */
	public Vector findMatch(MOB mob, Vector prereq)
	{
		final Vector possibilities=new Vector();
		Hashtable map=new Hashtable();
		if(fpmap==null)
		{
			// parse each pattern string once and cache the token arrays
			fpmap=new Object[pmap.length];
			for(int p=0;p<pmap.length;p++)
				fpmap[p]=CMParms.toStringArray(CMParms.parse(pmap[p][0]));
		}
		String[] chk=null;
		final String[] req=CMParms.toStringArray(prereq);
		boolean reject=false;
		int ci=0,ri=0;
		// pre-resolve each request word as a possible social (%s) or command (%k)
		final Object[] commands=new Object[req.length];
		final Social[] socials=new Social[req.length];
		for(int i=0;i<req.length;i++)
		{
			socials[i]=CMLib.socials().fetchSocial(req[i],true);
			commands[i]=CMLib.english().findCommand(mob,CMParms.makeVector(req[i].toUpperCase()));
		}
		for(int p=0;p<fpmap.length;p++)
		{
			chk=(String[])fpmap[p];
			ci=0;
			ri=0;
			reject=false;
			while((!reject)&&(ci<chk.length)&&(ri<req.length))
			{
				if(chk[ci].equals(req[ri]))
				{
					// literal token match
					ci++;
					ri++;
					reject=false;
				}
				else
				if(chk[ci].charAt(0)=='%')
				{
					switch(chk[ci].charAt(1))
					{
					case 's': // %s must be a known social
						if(socials[ri]==null)
							reject=true;
						else
						{
							map.put("%s",req[ri]);
							reject=false;
							ci++;
							ri++;
						}
						break;
					case 'm':
					case 'g':
					case '*':
					case 'r':
					case 'i':
					{
						// greedy wildcard: consume words until the next literal
						// pattern token appears, leaving enough words for the
						// remaining pattern tokens ("remain" counts this one too).
						final String code=chk[ci];
						final int remain=chk.length-ci;
						String str=req[ri];
						ri++;
						ci++;
						reject=false;
						while(ri<=(req.length-remain))
						{
							String nxt="";
							if(ci<chk.length)
							{
								nxt=chk[ci];
								if(nxt.startsWith("%"))
									nxt=""; // next token is another wildcard; no literal stop-word
							}
							if((nxt.length()>0)
							&&(ri<req.length)
							&&(req[ri].equals(nxt)))
								break;
							if(ri<req.length)
								str=str+" "+req[ri];
							ri++;
						}
						map.put(code,str);
						break;
					}
					case 'k': // %k must be a resolvable command word
						if(commands[ri]==null)
							reject=true;
						else
						{
							map.put("%k",req[ri]);
							reject=false;
							ci++;
							ri++;
						}
						break;
					default:
						break;
					}
				}
				else
					reject=true;
			}
			// accept only if the whole pattern AND the whole request were consumed
			if((reject)||(ci!=chk.length)||(ri!=req.length))
			{
				map.clear();
				continue;
			}
			if(CMSecurity.isDebugging("GEAS"))
				Log.debugOut("GEAS","POSS-"+pmap[p][1]);
			map.put("INSTR",pmap[p][1]);
			possibilities.addElement(map);
			map=new Hashtable();
		}
		return possibilities;
	}

	/**
	 * Strips leading and trailing punctuation (.,;!?') from a word.
	 * Iterates until no more characters can be removed, so mixed runs
	 * such as {@code ".,.,word"} are fully cleaned.
	 *
	 * @param s the word to clean; never null
	 * @return the word with surrounding punctuation removed
	 */
	public String cleanWord(String s)
	{
		final String chars=".,;!?'";
		// repeat until a full pass removes nothing, so alternating
		// punctuation (e.g. ".,.,.,") is stripped regardless of run length
		boolean changed=true;
		while(changed)
		{
			changed=false;
			for(int i=0;i<chars.length();i++)
			{
				while(s.startsWith(""+chars.charAt(i)))
				{
					s=s.substring(1).trim();
					changed=true;
				}
				while(s.endsWith(""+chars.charAt(i)))
				{
					s=s.substring(0,s.length()-1).trim();
					changed=true;
				}
			}
		}
		return s;
	}

	/**
	 * Turns a free-form request from {@code you} to {@code me} into a set of
	 * executable geas steps. Falls back to stripping universal starter
	 * phrases ("please", etc. -- see {@code universalStarters}) and retrying,
	 * and finally to a "wanderquery" step when nothing matches.
	 *
	 * @param you the requesting MOB; may be null
	 * @param me the MOB who must obey
	 * @param req the raw request text
	 * @return the parsed steps; never null
	 */
	public geasSteps processRequest(MOB you, MOB me, String req)
	{
		Vector REQ=CMParms.parse(req.toLowerCase().trim());
		for(int v=0;v<REQ.size();v++)
			REQ.setElementAt(cleanWord((String)REQ.elementAt(v)),v);
		Vector poss=findMatch(me,REQ);
		if(poss.size()==0)
		{
			// no direct match: peel off any universal starter phrases and retry
			req=CMParms.combine(REQ,0);
			boolean doneSomething=true;
			boolean didAnything=false;
			while(doneSomething)
			{
				doneSomething=false;
				for(int i=0;i<universalStarters.length;i++)
					if(req.startsWith(universalStarters[i]))
					{
						doneSomething=true;
						didAnything=true;
						// bug fix: strip the matched starter's own length,
						// not the length of the universalStarters array
						req=req.substring(universalStarters[i].length()).trim();
					}
			}
			if(didAnything)
			{
				REQ=CMParms.parse(req);
				poss=findMatch(me,REQ);
			}
		}
		if(CMSecurity.isDebugging("GEAS"))
			Log.debugOut("GEAS","POSSTOTAL-"+poss.size());
		final geasSteps geasSteps=new geasSteps(you,me);
		if(poss.size()==0)
		{
			// still nothing: queue a wander-query step carrying the raw request
			final geasStep g=new geasStep(geasSteps);
			g.que.addElement(CMParms.parse("wanderquery "+req));
			geasSteps.addElement(g);
		}
		else
		{
			for(int i=0;i<poss.size();i++)
			{
				final geasStep g=new geasStep(geasSteps);
				final Hashtable map=(Hashtable)poss.elementAt(i);
				final Vector all=CMParms.parseSemicolons((String)map.get("INSTR"),true);
				if(CMSecurity.isDebugging("GEAS"))
					Log.debugOut("GEAS",CMParms.toStringList(all));
				g.que=new Vector();
				for(int a=0;a<all.size();a++)
					g.que.addElement(CMParms.parse((String)all.elementAt(a)));
				if(you!=null)
					map.put("%c",you.name());
				map.put("%n",me.name());
				// substitute wildcard codes in the queued instructions with
				// the text captured during pattern matching
				for(int q=0;q<g.que.size();q++)
				{
					final Vector V=(Vector)g.que.elementAt(q);
					for(int v=0;v<V.size();v++)
					{
						final String s=(String)V.elementAt(v);
						if(s.startsWith("%"))
						{
							// NOTE(review): map.get(s.trim()) can be null if an
							// instruction references a code the pattern did not
							// capture -- presumably pmap is authored consistently;
							// verify if new patterns are added.
							V.setElementAt(CMLib.english().cleanArticles((String)map.get(s.trim())),v);
						}
					}
				}
				geasSteps.addElement(g);
			}
		}
		return geasSteps;
	}
}
package org.zstack.network.service;

import org.springframework.beans.factory.annotation.Autowired;
import org.zstack.core.componentloader.PluginRegistry;
import org.zstack.core.db.Q;
import org.zstack.core.db.SimpleQuery;
import org.zstack.core.db.SimpleQuery.Op;
import org.zstack.header.Component;
import org.zstack.header.core.Completion;
import org.zstack.header.core.NoErrorCompletion;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.exception.CloudRuntimeException;
import org.zstack.header.network.l3.*;
import org.zstack.header.network.service.DhcpStruct;
import org.zstack.header.network.service.NetworkServiceDhcpBackend;
import org.zstack.header.network.service.NetworkServiceProviderType;
import org.zstack.header.network.service.NetworkServiceType;
import org.zstack.header.vm.*;
import org.zstack.header.vm.VmInstanceSpec.HostName;
import org.zstack.network.l3.IpRangeHelper;
import org.zstack.utils.CollectionUtils;
import org.zstack.utils.Utils;
import org.zstack.utils.function.Function;
import org.zstack.utils.logging.CLogger;
import org.zstack.utils.network.IPv6Constants;
import org.zstack.utils.network.NetworkUtils;

import java.util.*;
import java.util.stream.Collectors;

/**
 * Applies and releases the DHCP network service for VM instances by grouping
 * the VM's nics per service provider and dispatching {@code DhcpStruct}
 * batches to the matching {@code NetworkServiceDhcpBackend} plugins.
 *
 * Created with IntelliJ IDEA.
 * User: frank
 * Time: 7:53 PM
 * To change this template use File | Settings | File Templates.
 */
public class DhcpExtension extends AbstractNetworkServiceExtension implements Component, VmDefaultL3NetworkChangedExtensionPoint {
    private static final CLogger logger = Utils.getLogger(DhcpExtension.class);

    @Autowired
    private PluginRegistry pluginRgty;

    // provider type -> backend plugin; populated once in populateExtensions()
    private final Map<NetworkServiceProviderType, NetworkServiceDhcpBackend> dhcpBackends = new HashMap<NetworkServiceProviderType, NetworkServiceDhcpBackend>();

    // key under which workoutDhcp() results are stashed in the spec's data map,
    // so releaseNetworkService() can reuse them instead of recomputing
    private final String RESULT = String.format("result.%s", DhcpExtension.class.getName());

    public NetworkServiceType getNetworkServiceType() {
        return NetworkServiceType.DHCP;
    }

    /**
     * Applies DHCP entries backend-by-backend, recursing through the iterator
     * in each backend's success callback; fails fast on the first error.
     */
    private void doDhcp(final Iterator<Map.Entry<NetworkServiceDhcpBackend, List<DhcpStruct>>> it, final VmInstanceSpec spec, final Completion complete) {
        if (!it.hasNext()) {
            complete.success();
            return;
        }

        Map.Entry<NetworkServiceDhcpBackend, List<DhcpStruct>> e = it.next();
        NetworkServiceDhcpBackend bkd = e.getKey();
        List<DhcpStruct> structs = e.getValue();
        logger.debug(String.format("%s is applying DHCP service", bkd.getClass().getName()));
        bkd.applyDhcpService(structs, spec, new Completion(complete) {
            @Override
            public void success() {
                // continue with the next backend only after this one succeeded
                doDhcp(it, spec, complete);
            }

            @Override
            public void fail(ErrorCode errorCode) {
                complete.fail(errorCode);
            }
        });
    }

    /**
     * Releases DHCP entries backend-by-backend; best-effort, so there is no
     * failure path (NoErrorCompletion) and every backend is always visited.
     */
    private void releaseDhcp(final Iterator<Map.Entry<NetworkServiceDhcpBackend, List<DhcpStruct>>> it, final VmInstanceSpec spec, final NoErrorCompletion completion) {
        if (!it.hasNext()) {
            completion.done();
            return;
        }

        Map.Entry<NetworkServiceDhcpBackend, List<DhcpStruct>> e = it.next();
        NetworkServiceDhcpBackend bkd = e.getKey();
        List<DhcpStruct> structs = e.getValue();
        logger.debug(String.format("%s is releasing DHCP service", bkd.getClass().getName()));
        bkd.releaseDhcpService(structs, spec, new NoErrorCompletion(completion) {
            @Override
            public void done() {
                releaseDhcp(it, spec, completion);
            }
        });
    }

    @Override
    public void applyNetworkService(VmInstanceSpec spec, Map<String, Object> data, Completion complete) {
        Map<NetworkServiceDhcpBackend, List<DhcpStruct>> entries = workoutDhcp(spec);
        // remember what was applied so release can undo exactly the same set
        data.put(RESULT, entries);
        doDhcp(entries.entrySet().iterator(), spec, complete);
    }

    @Override
    public void releaseNetworkService(VmInstanceSpec spec, Map<String, Object> data, NoErrorCompletion completion) {
        // reuse the entries recorded at apply time; recompute if absent
        // (e.g. release called without a prior apply in this flow)
        Map<NetworkServiceDhcpBackend, List<DhcpStruct>> entries = (Map<NetworkServiceDhcpBackend, List<DhcpStruct>>) data.get(RESULT);
        if (entries == null) {
            entries = workoutDhcp(spec);
        }
        releaseDhcp(entries.entrySet().iterator(), spec, completion);
    }

    /**
     * Registers every NetworkServiceDhcpBackend plugin, rejecting duplicate
     * registrations for the same provider type.
     */
    private void populateExtensions() {
        for (NetworkServiceDhcpBackend bkd : pluginRgty.getExtensionList(NetworkServiceDhcpBackend.class)) {
            NetworkServiceDhcpBackend old = dhcpBackends.get(bkd.getProviderType());
            if (old != null) {
                throw new CloudRuntimeException(String.format("duplicate NetworkServiceDhcpBackend[%s, %s] for type[%s]",
                        bkd.getClass().getName(), old.getClass().getName(), bkd.getProviderType()));
            }
            dhcpBackends.put(bkd.getProviderType(), bkd);
        }
    }

    /**
     * @return true when the nic holds 2+ IPs that all belong to one L3 network,
     *         i.e. an IPv4+IPv6 dual-stack nic on a single network
     */
    public boolean isDualStackNicInSingleL3Network(VmNicInventory nic) {
        if (nic.getUsedIps().size() < 2) {
            return false;
        }

        return nic.getUsedIps().stream().map(UsedIpInventory::getL3NetworkUuid).distinct().count() == 1;
    }

    /**
     * Builds the common (address-independent) part of a DhcpStruct for one nic.
     * The IP fields are filled in afterwards by setNicDhcp() or
     * setDualStackNicOfSingleL3Network().
     * NOTE(review): the {@code vm} parameter is not read here -- presumably kept
     * for interface symmetry; confirm before removing.
     */
    private DhcpStruct getDhcpStruct(VmInstanceInventory vm, List<VmInstanceSpec.HostName> hostNames, VmNicVO nic, UsedIpVO ip, boolean isDefaultNic) {
        // the ip's own L3 (if given) wins over the nic's L3
        String l3Uuid = nic.getL3NetworkUuid();
        if (ip != null) {
            l3Uuid = ip.getL3NetworkUuid();
        }
        L3NetworkInventory l3 = L3NetworkInventory.valueOf(dbf.findByUuid(l3Uuid, L3NetworkVO.class));
        DhcpStruct struct = new DhcpStruct();
        struct.setVmUuid(nic.getVmInstanceUuid());
        // pick the user-specified hostname for this L3, if any
        String hostname = CollectionUtils.find(hostNames, new Function<String, HostName>() {
            @Override
            public String call(HostName arg) {
                return arg.getL3NetworkUuid().equals(l3.getUuid()) ? arg.getHostname() : null;
            }
        });
        if (hostname != null && l3.getDnsDomain() != null) {
            // qualify the hostname with the network's DNS domain
            hostname = String.format("%s.%s", hostname, l3.getDnsDomain());
        }

        struct.setHostname(hostname);
        struct.setDnsDomain(l3.getDnsDomain());
        struct.setL3Network(l3);
        struct.setDefaultL3Network(isDefaultNic);
        struct.setMac(nic.getMac());
        struct.setMtu(new MtuGetter().getMtu(l3.getUuid()));
        struct.setNicType(nic.getType());
        return struct;
    }

    /**
     * Fills IPv4 and IPv6 address fields of a dual-stack nic (both IPs on one
     * L3 network) into a single struct; SLAAC IPv6 ranges are skipped because
     * the address is self-assigned and needs no DHCP entry.
     */
    private void setDualStackNicOfSingleL3Network(DhcpStruct struct, VmNicVO nic) {
        struct.setIpVersion(IPv6Constants.DUAL_STACK);
        // process IPv4 before IPv6 so the hostname fallback comes from the v4 address
        // NOTE(review): the method reference is spelled getIpVersionl -- looks like a
        // typo for getIpVersion, but it must match UsedIpVO's actual accessor; verify.
        List<UsedIpVO> sortedIps = nic.getUsedIps().stream().sorted(Comparator.comparingLong(UsedIpVO::getIpVersionl)).collect(Collectors.toList());
        for (UsedIpVO ip : sortedIps) {
            if (ip.getIpVersion() == IPv6Constants.IPv4) {
                struct.setGateway(ip.getGateway());
                struct.setIp(ip.getIp());
                struct.setNetmask(ip.getNetmask());
                if (struct.getHostname() == null) {
                    // fall back to a hostname derived from the IPv4 address, dots -> dashes
                    struct.setHostname(ip.getIp().replaceAll("\\.", "-"));
                }
            } else {
                List<NormalIpRangeVO> iprs = Q.New(NormalIpRangeVO.class).eq(NormalIpRangeVO_.l3NetworkUuid, ip.getL3NetworkUuid())
                        .eq(NormalIpRangeVO_.ipVersion, ip.getIpVersion()).list();
                if (iprs.get(0).getAddressMode().equals(IPv6Constants.SLAAC)) {
                    continue;
                }
                struct.setGateway6(ip.getGateway());
                struct.setIp6(ip.getIp());
                struct.setRaMode(iprs.get(0).getAddressMode());
                struct.setPrefixLength(iprs.get(0).getPrefixLen());
                struct.setFirstIp(NetworkUtils.getSmallestIp(iprs.stream().map(IpRangeVO::getStartIp).collect(Collectors.toList())));
                struct.setEndIP(NetworkUtils.getBiggesttIp(iprs.stream().map(IpRangeVO::getEndIp).collect(Collectors.toList())));
            }
        }
    }

    /**
     * Fills the address fields for a single-IP struct. No SLAAC check here:
     * makeDhcpStruct() already filters SLAAC IPv6 ranges before calling this.
     */
    private void setNicDhcp(DhcpStruct struct, UsedIpVO ip) {
        if (ip.getIpVersion() == IPv6Constants.IPv4) {
            struct.setGateway(ip.getGateway());
            struct.setIp(ip.getIp());
            struct.setNetmask(ip.getNetmask());
            if (struct.getHostname() == null) {
                // fall back to a hostname derived from the IPv4 address, dots -> dashes
                struct.setHostname(ip.getIp().replaceAll("\\.", "-"));
            }
        } else {
            List<NormalIpRangeVO> iprs = Q.New(NormalIpRangeVO.class).eq(NormalIpRangeVO_.l3NetworkUuid, ip.getL3NetworkUuid())
                    .eq(NormalIpRangeVO_.ipVersion, IPv6Constants.IPv6).list();
            struct.setGateway6(ip.getGateway());
            struct.setIp6(ip.getIp());
            struct.setRaMode(iprs.get(0).getAddressMode());
            struct.setPrefixLength(iprs.get(0).getPrefixLen());
            struct.setFirstIp(NetworkUtils.getSmallestIp(iprs.stream().map(IpRangeVO::getStartIp).collect(Collectors.toList())));
            struct.setEndIP(NetworkUtils.getBiggesttIp(iprs.stream().map(IpRangeVO::getEndIp).collect(Collectors.toList())));
        }
    }

    /**
     * Builds one DhcpStruct per (nic, ip) pair -- or one combined struct for a
     * dual-stack nic on a single L3 network -- skipping SLAAC IPv6 ranges.
     *
     * @param vm the VM inventory (provides the default-L3 uuid)
     * @param hostNames user-specified hostnames per L3 network
     * @param nics the nics to generate entries for
     * @return the DHCP entries; never null
     */
    public List<DhcpStruct> makeDhcpStruct(VmInstanceInventory vm, List<VmInstanceSpec.HostName> hostNames, List<VmNicVO> nics) {
        List<DhcpStruct> res = new ArrayList<>();

        List<VmNicVO> defaultNics = nics.stream().filter(nic -> nic.getL3NetworkUuid().equals(vm.getDefaultL3NetworkUuid())).collect(Collectors.toList());
        for (VmNicVO nic : nics) {
            // only the earliest-created nic on the default L3 counts as the default nic
            boolean isDefaultNic = nic.equals(VmNicVO.findTheEarliestOne(defaultNics));
            if (isDualStackNicInSingleL3Network(VmNicInventory.valueOf(nic))) {
                DhcpStruct struct = getDhcpStruct(vm, hostNames, nic, null, isDefaultNic);
                setDualStackNicOfSingleL3Network(struct, nic);
                res.add(struct);
                continue;
            }
            for (UsedIpVO ip : nic.getUsedIps()) {
                NormalIpRangeVO ipr = dbf.findByUuid(ip.getIpRangeUuid(), NormalIpRangeVO.class);
                if (ipr.getIpVersion() == IPv6Constants.IPv6 && (ipr.getAddressMode().equals(IPv6Constants.SLAAC))) {
                    // SLAAC addresses are self-assigned; no DHCP entry needed
                    continue;
                }
                DhcpStruct struct = getDhcpStruct(vm, hostNames, nic, ip, isDefaultNic);
                struct.setIpVersion(ip.getIpVersion());
                setNicDhcp(struct, ip);
                res.add(struct);
            }
        }

        return res;
    }

    /**
     * Groups the spec's destination nics by the DHCP provider serving their L3
     * networks and builds the per-backend DhcpStruct lists.
     *
     * @throws CloudRuntimeException when a provider type has no registered backend
     */
    private Map<NetworkServiceDhcpBackend, List<DhcpStruct>> workoutDhcp(VmInstanceSpec spec) {
        Map<NetworkServiceDhcpBackend, List<DhcpStruct>> map = new HashMap<NetworkServiceDhcpBackend, List<DhcpStruct>>();
        Map<NetworkServiceProviderType, List<L3NetworkInventory>> providerMap = getNetworkServiceProviderMap(NetworkServiceType.DHCP,
                VmNicSpec.getL3NetworkInventoryOfSpec(spec.getL3Networks()));

        for (Map.Entry<NetworkServiceProviderType, List<L3NetworkInventory>> e : providerMap.entrySet()) {
            NetworkServiceProviderType ptype = e.getKey();
            List<DhcpStruct> lst = new ArrayList<DhcpStruct>();
            List<VmNicVO> nics = new ArrayList<>();

            Map<String, L3NetworkInventory> l3Map = new HashMap<>();
            for (L3NetworkInventory l3 : e.getValue()) {
                l3Map.put(l3.getUuid(), l3);
            }

            // keep only nics that have at least one IP on a provider-served L3
            // with a usable (normal) IP range
            for (VmNicInventory inv : spec.getDestNics()) {
                VmNicVO vmNicVO = dbf.findByUuid(inv.getUuid(), VmNicVO.class);
                for (UsedIpVO ip : vmNicVO.getUsedIps()) {
                    L3NetworkInventory l3 = l3Map.get(ip.getL3NetworkUuid());
                    if (l3 == null) {
                        continue;
                    }
                    List<IpRangeInventory> iprs = IpRangeHelper.getNormalIpRanges(l3);
                    if (iprs.isEmpty()) {
                        continue;
                    }
                    if (!nics.contains(vmNicVO)) {
                        nics.add(vmNicVO);
                    }
                }
            }
            lst.addAll(makeDhcpStruct(spec.getVmInventory(), spec.getHostnames(), nics));

            NetworkServiceDhcpBackend bkd = dhcpBackends.get(ptype);
            if (bkd == null) {
                throw new CloudRuntimeException(String.format("unable to find NetworkServiceDhcpBackend[provider type: %s]", ptype));
            }
            map.put(bkd, lst);

            if (logger.isTraceEnabled()) {
                logger.trace(String.format("DHCP Backend[%s] is about to apply entries: \n%s", bkd.getClass().getName(), lst));
            }
        }

        return map;
    }

    @Override
    public boolean start() {
        populateExtensions();
        return true;
    }

    @Override
    public boolean stop() {
        return true;
    }

    /**
     * Notifies the DHCP backends of both the old and the new default L3 so they
     * can update their records; failures are only logged (the VM may need a
     * reboot to pick up the change).
     */
    @Override
    public void vmDefaultL3NetworkChanged(VmInstanceInventory vm, String previousL3, String nowL3) {
        List<String> l3Uuids = new ArrayList<String>();
        if (previousL3 != null) {
            l3Uuids.add(previousL3);
        }
        if (nowL3 != null) {
            l3Uuids.add(nowL3);
        }

        SimpleQuery<L3NetworkVO> q = dbf.createQuery(L3NetworkVO.class);
        q.add(L3NetworkVO_.uuid, Op.IN, l3Uuids);
        List<L3NetworkVO> vos = q.list();
        List<L3NetworkInventory> invs = L3NetworkInventory.valueOf(vos);
        Map<NetworkServiceProviderType, List<L3NetworkInventory>> providerMap = getNetworkServiceProviderMap(NetworkServiceType.DHCP, invs);
        for (Map.Entry<NetworkServiceProviderType, List<L3NetworkInventory>> e : providerMap.entrySet()) {
            NetworkServiceProviderType ptype = e.getKey();
            NetworkServiceDhcpBackend bkd = dhcpBackends.get(ptype);
            if (bkd == null) {
                throw new CloudRuntimeException(String.format("unable to find NetworkServiceDhcpBackend[provider type: %s]", ptype));
            }
            bkd.vmDefaultL3NetworkChanged(vm, previousL3, nowL3, new Completion(null) {
                @Override
                public void success() {
                    // pass
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    logger.warn(String.format("unable to change the VM[uuid:%s]'s default L3 network in the DHCP backend, %s. You may need to reboot" +
                            " the VM to use the new default L3 network setting", vm.getUuid(), errorCode));
                }
            });
        }
    }
}
/**
 * generated by Xtext 2.9.2
 */
/*
 * NOTE(review): this class is EMF/Xtext-GENERATED code. Do not hand-edit the
 * logic -- change the grammar/model and regenerate; members tagged @generated
 * are overwritten on regeneration. Only comments were added in this review.
 */
package xmodelica.modelica.impl;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;

import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;

import org.eclipse.emf.ecore.impl.ENotificationImpl;

import xmodelica.modelica.ArraySubscripts;
import xmodelica.modelica.ClassDefinition;
import xmodelica.modelica.ClassModification;
import xmodelica.modelica.Composition;
import xmodelica.modelica.ModelicaPackage;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Class Definition</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link xmodelica.modelica.impl.ClassDefinitionImpl#isFinal <em>Final</em>}</li>
 *   <li>{@link xmodelica.modelica.impl.ClassDefinitionImpl#isEncapsulated <em>Encapsulated</em>}</li>
 *   <li>{@link xmodelica.modelica.impl.ClassDefinitionImpl#getName <em>Name</em>}</li>
 *   <li>{@link xmodelica.modelica.impl.ClassDefinitionImpl#getStr_comment <em>Str comment</em>}</li>
 *   <li>{@link xmodelica.modelica.impl.ClassDefinitionImpl#getComposition <em>Composition</em>}</li>
 *   <li>{@link xmodelica.modelica.impl.ClassDefinitionImpl#getName_end <em>Name end</em>}</li>
 *   <li>{@link xmodelica.modelica.impl.ClassDefinitionImpl#getPrefix <em>Prefix</em>}</li>
 *   <li>{@link xmodelica.modelica.impl.ClassDefinitionImpl#getName2 <em>Name2</em>}</li>
 *   <li>{@link xmodelica.modelica.impl.ClassDefinitionImpl#getSubs <em>Subs</em>}</li>
 *   <li>{@link xmodelica.modelica.impl.ClassDefinitionImpl#getMod <em>Mod</em>}</li>
 * </ul>
 *
 * @generated
 */
public class ClassDefinitionImpl extends ElementImpl implements ClassDefinition
{
  /**
   * The default value of the '{@link #isFinal() <em>Final</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #isFinal()
   * @generated
   * @ordered
   */
  protected static final boolean FINAL_EDEFAULT = false;

  /**
   * The cached value of the '{@link #isFinal() <em>Final</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #isFinal()
   * @generated
   * @ordered
   */
  protected boolean final_ = FINAL_EDEFAULT;

  /**
   * The default value of the '{@link #isEncapsulated() <em>Encapsulated</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #isEncapsulated()
   * @generated
   * @ordered
   */
  protected static final boolean ENCAPSULATED_EDEFAULT = false;

  /**
   * The cached value of the '{@link #isEncapsulated() <em>Encapsulated</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #isEncapsulated()
   * @generated
   * @ordered
   */
  protected boolean encapsulated = ENCAPSULATED_EDEFAULT;

  /**
   * The default value of the '{@link #getName() <em>Name</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getName()
   * @generated
   * @ordered
   */
  protected static final String NAME_EDEFAULT = null;

  /**
   * The cached value of the '{@link #getName() <em>Name</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getName()
   * @generated
   * @ordered
   */
  protected String name = NAME_EDEFAULT;

  /**
   * The default value of the '{@link #getStr_comment() <em>Str comment</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getStr_comment()
   * @generated
   * @ordered
   */
  protected static final String STR_COMMENT_EDEFAULT = null;

  /**
   * The cached value of the '{@link #getStr_comment() <em>Str comment</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getStr_comment()
   * @generated
   * @ordered
   */
  protected String str_comment = STR_COMMENT_EDEFAULT;

  /**
   * The cached value of the '{@link #getComposition() <em>Composition</em>}' containment reference.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getComposition()
   * @generated
   * @ordered
   */
  protected Composition composition;

  /**
   * The default value of the '{@link #getName_end() <em>Name end</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getName_end()
   * @generated
   * @ordered
   */
  protected static final String NAME_END_EDEFAULT = null;

  /**
   * The cached value of the '{@link #getName_end() <em>Name end</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getName_end()
   * @generated
   * @ordered
   */
  protected String name_end = NAME_END_EDEFAULT;

  /**
   * The default value of the '{@link #getPrefix() <em>Prefix</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getPrefix()
   * @generated
   * @ordered
   */
  protected static final String PREFIX_EDEFAULT = null;

  /**
   * The cached value of the '{@link #getPrefix() <em>Prefix</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getPrefix()
   * @generated
   * @ordered
   */
  protected String prefix = PREFIX_EDEFAULT;

  /**
   * The default value of the '{@link #getName2() <em>Name2</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getName2()
   * @generated
   * @ordered
   */
  protected static final String NAME2_EDEFAULT = null;

  /**
   * The cached value of the '{@link #getName2() <em>Name2</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getName2()
   * @generated
   * @ordered
   */
  protected String name2 = NAME2_EDEFAULT;

  /**
   * The cached value of the '{@link #getSubs() <em>Subs</em>}' containment reference.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getSubs()
   * @generated
   * @ordered
   */
  protected ArraySubscripts subs;

  /**
   * The cached value of the '{@link #getMod() <em>Mod</em>}' containment reference.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getMod()
   * @generated
   * @ordered
   */
  protected ClassModification mod;

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected ClassDefinitionImpl()
  {
    super();
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  protected EClass eStaticClass()
  {
    return ModelicaPackage.Literals.CLASS_DEFINITION;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public boolean isFinal()
  {
    return final_;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setFinal(boolean newFinal)
  {
    boolean oldFinal = final_;
    final_ = newFinal;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, ModelicaPackage.CLASS_DEFINITION__FINAL, oldFinal, final_));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public boolean isEncapsulated()
  {
    return encapsulated;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setEncapsulated(boolean newEncapsulated)
  {
    boolean oldEncapsulated = encapsulated;
    encapsulated = newEncapsulated;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, ModelicaPackage.CLASS_DEFINITION__ENCAPSULATED, oldEncapsulated, encapsulated));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public String getName()
  {
    return name;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setName(String newName)
  {
    String oldName = name;
    name = newName;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, ModelicaPackage.CLASS_DEFINITION__NAME, oldName, name));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public String getStr_comment()
  {
    return str_comment;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setStr_comment(String newStr_comment)
  {
    String oldStr_comment = str_comment;
    str_comment = newStr_comment;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, ModelicaPackage.CLASS_DEFINITION__STR_COMMENT, oldStr_comment, str_comment));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public Composition getComposition()
  {
    return composition;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public NotificationChain basicSetComposition(Composition newComposition, NotificationChain msgs)
  {
    Composition oldComposition = composition;
    composition = newComposition;
    if (eNotificationRequired())
    {
      ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, ModelicaPackage.CLASS_DEFINITION__COMPOSITION, oldComposition, newComposition);
      if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setComposition(Composition newComposition)
  {
    if (newComposition != composition)
    {
      NotificationChain msgs = null;
      if (composition != null)
        msgs = ((InternalEObject)composition).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - ModelicaPackage.CLASS_DEFINITION__COMPOSITION, null, msgs);
      if (newComposition != null)
        msgs = ((InternalEObject)newComposition).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - ModelicaPackage.CLASS_DEFINITION__COMPOSITION, null, msgs);
      msgs = basicSetComposition(newComposition, msgs);
      if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, ModelicaPackage.CLASS_DEFINITION__COMPOSITION, newComposition, newComposition));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public String getName_end()
  {
    return name_end;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setName_end(String newName_end)
  {
    String oldName_end = name_end;
    name_end = newName_end;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, ModelicaPackage.CLASS_DEFINITION__NAME_END, oldName_end, name_end));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public String getPrefix()
  {
    return prefix;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setPrefix(String newPrefix)
  {
    String oldPrefix = prefix;
    prefix = newPrefix;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, ModelicaPackage.CLASS_DEFINITION__PREFIX, oldPrefix, prefix));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public String getName2()
  {
    return name2;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setName2(String newName2)
  {
    String oldName2 = name2;
    name2 = newName2;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, ModelicaPackage.CLASS_DEFINITION__NAME2, oldName2, name2));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public ArraySubscripts getSubs()
  {
    return subs;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public NotificationChain basicSetSubs(ArraySubscripts newSubs, NotificationChain msgs)
  {
    ArraySubscripts oldSubs = subs;
    subs = newSubs;
    if (eNotificationRequired())
    {
      ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, ModelicaPackage.CLASS_DEFINITION__SUBS, oldSubs, newSubs);
      if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setSubs(ArraySubscripts newSubs)
  {
    if (newSubs != subs)
    {
      NotificationChain msgs = null;
      if (subs != null)
        msgs = ((InternalEObject)subs).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - ModelicaPackage.CLASS_DEFINITION__SUBS, null, msgs);
      if (newSubs != null)
        msgs = ((InternalEObject)newSubs).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - ModelicaPackage.CLASS_DEFINITION__SUBS, null, msgs);
      msgs = basicSetSubs(newSubs, msgs);
      if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, ModelicaPackage.CLASS_DEFINITION__SUBS, newSubs, newSubs));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public ClassModification getMod()
  {
    return mod;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public NotificationChain basicSetMod(ClassModification newMod, NotificationChain msgs)
  {
    ClassModification oldMod = mod;
    mod = newMod;
    if (eNotificationRequired())
    {
      ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, ModelicaPackage.CLASS_DEFINITION__MOD, oldMod, newMod);
      if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setMod(ClassModification newMod)
  {
    if (newMod != mod)
    {
      NotificationChain msgs = null;
      if (mod != null)
        msgs = ((InternalEObject)mod).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - ModelicaPackage.CLASS_DEFINITION__MOD, null, msgs);
      if (newMod != null)
        msgs = ((InternalEObject)newMod).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - ModelicaPackage.CLASS_DEFINITION__MOD, null, msgs);
      msgs = basicSetMod(newMod, msgs);
      if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, ModelicaPackage.CLASS_DEFINITION__MOD, newMod, newMod));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs)
  {
    switch (featureID)
    {
      case ModelicaPackage.CLASS_DEFINITION__COMPOSITION:
        return basicSetComposition(null, msgs);
      case ModelicaPackage.CLASS_DEFINITION__SUBS:
        return basicSetSubs(null, msgs);
      case ModelicaPackage.CLASS_DEFINITION__MOD:
        return basicSetMod(null, msgs);
    }
    return super.eInverseRemove(otherEnd, featureID, msgs);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public Object eGet(int featureID, boolean resolve, boolean coreType)
  {
    switch (featureID)
    {
      case ModelicaPackage.CLASS_DEFINITION__FINAL:
        return isFinal();
      case ModelicaPackage.CLASS_DEFINITION__ENCAPSULATED:
        return isEncapsulated();
      case ModelicaPackage.CLASS_DEFINITION__NAME:
        return getName();
      case ModelicaPackage.CLASS_DEFINITION__STR_COMMENT:
        return getStr_comment();
      case ModelicaPackage.CLASS_DEFINITION__COMPOSITION:
        return getComposition();
      case ModelicaPackage.CLASS_DEFINITION__NAME_END:
        return getName_end();
      case ModelicaPackage.CLASS_DEFINITION__PREFIX:
        return getPrefix();
      case ModelicaPackage.CLASS_DEFINITION__NAME2:
        return getName2();
      case ModelicaPackage.CLASS_DEFINITION__SUBS:
        return getSubs();
      case ModelicaPackage.CLASS_DEFINITION__MOD:
        return getMod();
    }
    return super.eGet(featureID, resolve, coreType);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eSet(int featureID, Object newValue)
  {
    switch (featureID)
    {
      case ModelicaPackage.CLASS_DEFINITION__FINAL:
        setFinal((Boolean)newValue);
        return;
      case ModelicaPackage.CLASS_DEFINITION__ENCAPSULATED:
        setEncapsulated((Boolean)newValue);
        return;
      case ModelicaPackage.CLASS_DEFINITION__NAME:
        setName((String)newValue);
        return;
      case ModelicaPackage.CLASS_DEFINITION__STR_COMMENT:
        setStr_comment((String)newValue);
        return;
      case ModelicaPackage.CLASS_DEFINITION__COMPOSITION:
        setComposition((Composition)newValue);
        return;
      case ModelicaPackage.CLASS_DEFINITION__NAME_END:
        setName_end((String)newValue);
        return;
      case ModelicaPackage.CLASS_DEFINITION__PREFIX:
        setPrefix((String)newValue);
        return;
      case ModelicaPackage.CLASS_DEFINITION__NAME2:
        setName2((String)newValue);
        return;
      case ModelicaPackage.CLASS_DEFINITION__SUBS:
        setSubs((ArraySubscripts)newValue);
        return;
      case ModelicaPackage.CLASS_DEFINITION__MOD:
        setMod((ClassModification)newValue);
        return;
    }
    super.eSet(featureID, newValue);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eUnset(int featureID)
  {
    switch (featureID)
    {
      case ModelicaPackage.CLASS_DEFINITION__FINAL:
        setFinal(FINAL_EDEFAULT);
        return;
      case ModelicaPackage.CLASS_DEFINITION__ENCAPSULATED:
        setEncapsulated(ENCAPSULATED_EDEFAULT);
        return;
      case ModelicaPackage.CLASS_DEFINITION__NAME:
        setName(NAME_EDEFAULT);
        return;
      case ModelicaPackage.CLASS_DEFINITION__STR_COMMENT:
        setStr_comment(STR_COMMENT_EDEFAULT);
        return;
      case ModelicaPackage.CLASS_DEFINITION__COMPOSITION:
        setComposition((Composition)null);
        return;
      case ModelicaPackage.CLASS_DEFINITION__NAME_END:
        setName_end(NAME_END_EDEFAULT);
        return;
      case ModelicaPackage.CLASS_DEFINITION__PREFIX:
        setPrefix(PREFIX_EDEFAULT);
        return;
      case ModelicaPackage.CLASS_DEFINITION__NAME2:
        setName2(NAME2_EDEFAULT);
        return;
      case ModelicaPackage.CLASS_DEFINITION__SUBS:
        setSubs((ArraySubscripts)null);
        return;
      case ModelicaPackage.CLASS_DEFINITION__MOD:
        setMod((ClassModification)null);
        return;
    }
    super.eUnset(featureID);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public boolean eIsSet(int featureID)
  {
    switch (featureID)
    {
      case ModelicaPackage.CLASS_DEFINITION__FINAL:
        return final_ != FINAL_EDEFAULT;
      case ModelicaPackage.CLASS_DEFINITION__ENCAPSULATED:
        return encapsulated != ENCAPSULATED_EDEFAULT;
      case ModelicaPackage.CLASS_DEFINITION__NAME:
        return NAME_EDEFAULT == null ? name != null : !NAME_EDEFAULT.equals(name);
      case ModelicaPackage.CLASS_DEFINITION__STR_COMMENT:
        return STR_COMMENT_EDEFAULT == null ? str_comment != null : !STR_COMMENT_EDEFAULT.equals(str_comment);
      case ModelicaPackage.CLASS_DEFINITION__COMPOSITION:
        return composition != null;
      case ModelicaPackage.CLASS_DEFINITION__NAME_END:
        return NAME_END_EDEFAULT == null ? name_end != null : !NAME_END_EDEFAULT.equals(name_end);
      case ModelicaPackage.CLASS_DEFINITION__PREFIX:
        return PREFIX_EDEFAULT == null ? prefix != null : !PREFIX_EDEFAULT.equals(prefix);
      case ModelicaPackage.CLASS_DEFINITION__NAME2:
        return NAME2_EDEFAULT == null ? name2 != null : !NAME2_EDEFAULT.equals(name2);
      case ModelicaPackage.CLASS_DEFINITION__SUBS:
        return subs != null;
      case ModelicaPackage.CLASS_DEFINITION__MOD:
        return mod != null;
    }
    return super.eIsSet(featureID);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  /* Per EMF convention, containment references (composition, subs, mod) are
   * intentionally omitted from the generated toString(). */
  @Override
  public String toString()
  {
    if (eIsProxy()) return super.toString();

    StringBuffer result = new StringBuffer(super.toString());
    result.append(" (final: ");
    result.append(final_);
    result.append(", encapsulated: ");
    result.append(encapsulated);
    result.append(", name: ");
    result.append(name);
    result.append(", str_comment: ");
    result.append(str_comment);
    result.append(", name_end: ");
    result.append(name_end);
    result.append(", prefix: ");
    result.append(prefix);
    result.append(", name2: ");
    result.append(name2);
    result.append(')');
    return result.toString();
  }

} //ClassDefinitionImpl
/**
 */
package CIM.IEC61970.Informative.MarketOperations.impl;

import CIM.IEC61970.Informative.MarketOperations.Bid;
import CIM.IEC61970.Informative.MarketOperations.BidClearing;
import CIM.IEC61970.Informative.MarketOperations.MarketOperationsPackage;

import CIM.impl.ElementImpl;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;

import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;

import org.eclipse.emf.ecore.impl.ENotificationImpl;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Bid Clearing</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link CIM.IEC61970.Informative.MarketOperations.impl.BidClearingImpl#getStartUpCost <em>Start Up Cost</em>}</li>
 *   <li>{@link CIM.IEC61970.Informative.MarketOperations.impl.BidClearingImpl#getBid <em>Bid</em>}</li>
 *   <li>{@link CIM.IEC61970.Informative.MarketOperations.impl.BidClearingImpl#getNoLoadCost <em>No Load Cost</em>}</li>
 *   <li>{@link CIM.IEC61970.Informative.MarketOperations.impl.BidClearingImpl#getLostOpCost <em>Lost Op Cost</em>}</li>
 * </ul>
 *
 * @generated
 */
public class BidClearingImpl extends ElementImpl implements BidClearing {
	/**
	 * The default value of the '{@link #getStartUpCost() <em>Start Up Cost</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getStartUpCost()
	 * @generated
	 * @ordered
	 */
	protected static final float START_UP_COST_EDEFAULT = 0.0F;

	/**
	 * The cached value of the '{@link #getStartUpCost() <em>Start Up Cost</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getStartUpCost()
	 * @generated
	 * @ordered
	 */
	protected float startUpCost = START_UP_COST_EDEFAULT;

	/**
	 * The cached value of the '{@link #getBid() <em>Bid</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getBid()
	 * @generated
	 * @ordered
	 */
	protected Bid bid;

	/**
	 * The default value of the '{@link #getNoLoadCost() <em>No Load Cost</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getNoLoadCost()
	 * @generated
	 * @ordered
	 */
	protected static final float NO_LOAD_COST_EDEFAULT = 0.0F;

	/**
	 * The cached value of the '{@link #getNoLoadCost() <em>No Load Cost</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getNoLoadCost()
	 * @generated
	 * @ordered
	 */
	protected float noLoadCost = NO_LOAD_COST_EDEFAULT;

	/**
	 * The default value of the '{@link #getLostOpCost() <em>Lost Op Cost</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getLostOpCost()
	 * @generated
	 * @ordered
	 */
	protected static final float LOST_OP_COST_EDEFAULT = 0.0F;

	/**
	 * The cached value of the '{@link #getLostOpCost() <em>Lost Op Cost</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getLostOpCost()
	 * @generated
	 * @ordered
	 */
	protected float lostOpCost = LOST_OP_COST_EDEFAULT;

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected BidClearingImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * Binds this implementation to its metamodel class.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return MarketOperationsPackage.Literals.BID_CLEARING;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public float getStartUpCost() {
		return startUpCost;
	}

	/**
	 * <!-- begin-user-doc -->
	 * Sets the attribute and emits a SET notification when adapters are attached.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setStartUpCost(float newStartUpCost) {
		float oldStartUpCost = startUpCost;
		startUpCost = newStartUpCost;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.BID_CLEARING__START_UP_COST, oldStartUpCost, startUpCost));
	}

	/**
	 * <!-- begin-user-doc -->
	 * Resolves the reference if it is currently a proxy, notifying on resolution.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public Bid getBid() {
		if (bid != null && bid.eIsProxy()) {
			InternalEObject oldBid = (InternalEObject)bid;
			bid = (Bid)eResolveProxy(oldBid);
			if (bid != oldBid) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, MarketOperationsPackage.BID_CLEARING__BID, oldBid, bid));
			}
		}
		return bid;
	}

	/**
	 * <!-- begin-user-doc -->
	 * Raw accessor that does not resolve proxies.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public Bid basicGetBid() {
		return bid;
	}

	/**
	 * <!-- begin-user-doc -->
	 * Sets the reference without touching the inverse side; used by eInverseAdd/Remove.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetBid(Bid newBid, NotificationChain msgs) {
		Bid oldBid = bid;
		bid = newBid;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.BID_CLEARING__BID, oldBid, newBid);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}

	/**
	 * <!-- begin-user-doc -->
	 * Sets the reference and keeps the opposite end (Bid.bidClearing) consistent.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setBid(Bid newBid) {
		if (newBid != bid) {
			NotificationChain msgs = null;
			if (bid != null)
				msgs = ((InternalEObject)bid).eInverseRemove(this, MarketOperationsPackage.BID__BID_CLEARING, Bid.class, msgs);
			if (newBid != null)
				msgs = ((InternalEObject)newBid).eInverseAdd(this, MarketOperationsPackage.BID__BID_CLEARING, Bid.class, msgs);
			msgs = basicSetBid(newBid, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.BID_CLEARING__BID, newBid, newBid));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public float getNoLoadCost() {
		return noLoadCost;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setNoLoadCost(float newNoLoadCost) {
		float oldNoLoadCost = noLoadCost;
		noLoadCost = newNoLoadCost;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.BID_CLEARING__NO_LOAD_COST, oldNoLoadCost, noLoadCost));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public float getLostOpCost() {
		return lostOpCost;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setLostOpCost(float newLostOpCost) {
		float oldLostOpCost = lostOpCost;
		lostOpCost = newLostOpCost;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.BID_CLEARING__LOST_OP_COST, oldLostOpCost, lostOpCost));
	}

	/**
	 * <!-- begin-user-doc -->
	 * Inverse-add for the bidirectional Bid reference; detaches any previous bid first.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case MarketOperationsPackage.BID_CLEARING__BID:
				if (bid != null)
					msgs = ((InternalEObject)bid).eInverseRemove(this, MarketOperationsPackage.BID__BID_CLEARING, Bid.class, msgs);
				return basicSetBid((Bid)otherEnd, msgs);
		}
		return super.eInverseAdd(otherEnd, featureID, msgs);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case MarketOperationsPackage.BID_CLEARING__BID:
				return basicSetBid(null, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}

	/**
	 * <!-- begin-user-doc -->
	 * Reflective getter; returns the raw (unresolved) bid when {@code resolve} is false.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case MarketOperationsPackage.BID_CLEARING__START_UP_COST:
				return getStartUpCost();
			case MarketOperationsPackage.BID_CLEARING__BID:
				if (resolve) return getBid();
				return basicGetBid();
			case MarketOperationsPackage.BID_CLEARING__NO_LOAD_COST:
				return getNoLoadCost();
			case MarketOperationsPackage.BID_CLEARING__LOST_OP_COST:
				return getLostOpCost();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * <!-- begin-user-doc -->
	 * Reflective setter.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case MarketOperationsPackage.BID_CLEARING__START_UP_COST:
				setStartUpCost((Float)newValue);
				return;
			case MarketOperationsPackage.BID_CLEARING__BID:
				setBid((Bid)newValue);
				return;
			case MarketOperationsPackage.BID_CLEARING__NO_LOAD_COST:
				setNoLoadCost((Float)newValue);
				return;
			case MarketOperationsPackage.BID_CLEARING__LOST_OP_COST:
				setLostOpCost((Float)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * <!-- begin-user-doc -->
	 * Reflective unset: attributes back to EDEFAULT, reference to null.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case MarketOperationsPackage.BID_CLEARING__START_UP_COST:
				setStartUpCost(START_UP_COST_EDEFAULT);
				return;
			case MarketOperationsPackage.BID_CLEARING__BID:
				setBid((Bid)null);
				return;
			case MarketOperationsPackage.BID_CLEARING__NO_LOAD_COST:
				setNoLoadCost(NO_LOAD_COST_EDEFAULT);
				return;
			case MarketOperationsPackage.BID_CLEARING__LOST_OP_COST:
				setLostOpCost(LOST_OP_COST_EDEFAULT);
				return;
		}
		super.eUnset(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * Reflective "is set" check against the defaults.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case MarketOperationsPackage.BID_CLEARING__START_UP_COST:
				return startUpCost != START_UP_COST_EDEFAULT;
			case MarketOperationsPackage.BID_CLEARING__BID:
				return bid != null;
			case MarketOperationsPackage.BID_CLEARING__NO_LOAD_COST:
				return noLoadCost != NO_LOAD_COST_EDEFAULT;
			case MarketOperationsPackage.BID_CLEARING__LOST_OP_COST:
				return lostOpCost != LOST_OP_COST_EDEFAULT;
		}
		return super.eIsSet(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * Debug representation listing the attribute values.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String toString() {
		if (eIsProxy()) return super.toString();

		StringBuffer result = new StringBuffer(super.toString());
		result.append(" (startUpCost: ");
		result.append(startUpCost);
		result.append(", noLoadCost: ");
		result.append(noLoadCost);
		result.append(", lostOpCost: ");
		result.append(lostOpCost);
		result.append(')');
		return result.toString();
	}

} //BidClearingImpl
/*
 * Licensed to Peter Karich under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 *
 * Peter Karich licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the
 * License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.graphhopper.routing.util;

import com.graphhopper.reader.OSMWay;
import com.graphhopper.util.BitUtil;
import com.graphhopper.util.EdgeIteratorState;
import com.graphhopper.util.PMap;
import com.graphhopper.util.PointList;

import static com.graphhopper.util.Helper.*;

/**
 * Stores two speed values into an edge (one per travel direction) to support avoiding too much
 * incline. The reverse speed occupies extra bits allocated in {@link #defineWayBits}.
 * <p/>
 * @author Peter Karich
 */
public class Bike2WeightFlagEncoder extends BikeFlagEncoder
{
    // Encoder for the backward-direction speed; bit position is assigned in defineWayBits.
    private EncodedDoubleValue reverseSpeedEncoder;

    public Bike2WeightFlagEncoder()
    {
        super();
    }

    public Bike2WeightFlagEncoder( String propertiesStr )
    {
        super(new PMap(propertiesStr));
    }

    public Bike2WeightFlagEncoder( PMap properties )
    {
        super(properties);
    }

    public Bike2WeightFlagEncoder( int speedBits, double speedFactor, int maxTurnCosts )
    {
        super(speedBits, speedFactor, maxTurnCosts);
    }

    @Override
    public int getVersion()
    {
        return 1;
    }

    /**
     * Reserves the additional bits for the reverse speed after the superclass has claimed its own.
     */
    @Override
    public int defineWayBits( int index, int shift )
    {
        shift = super.defineWayBits(index, shift);
        reverseSpeedEncoder = new EncodedDoubleValue("Reverse Speed", shift, speedBits, speedFactor,
                getHighwaySpeed("cycleway"), maxPossibleSpeed);
        shift += reverseSpeedEncoder.getBits();
        return shift;
    }

    @Override
    public double getReverseSpeed( long flags )
    {
        return reverseSpeedEncoder.getDoubleValue(flags);
    }

    /**
     * Encodes the backward speed into the flags; speeds below half the encoder resolution clear
     * backward access instead, and speeds above the maximum are capped.
     */
    @Override
    public long setReverseSpeed( long flags, double speed )
    {
        if (speed < 0)
            throw new IllegalArgumentException("Speed cannot be negative: " + speed + ", flags:" + BitUtil.LITTLE.toBitString(flags));

        if (speed < speedEncoder.factor / 2)
            return setLowSpeed(flags, speed, true);

        if (speed > getMaxSpeed())
            speed = getMaxSpeed();

        return reverseSpeedEncoder.setDoubleValue(flags, speed);
    }

    @Override
    public long handleSpeed( OSMWay way, double speed, long flags )
    {
        // handle oneways: the superclass decides forward/backward access first
        flags = super.handleSpeed(way, speed, flags);
        if (isBackward(flags))
            flags = setReverseSpeed(flags, speed);

        if (isForward(flags))
            flags = setSpeed(flags, speed);

        return flags;
    }

    /**
     * A speed too low to encode is treated as "no access" in the given direction.
     */
    @Override
    protected long setLowSpeed( long flags, double speed, boolean reverse )
    {
        if (reverse)
            return setBool(reverseSpeedEncoder.setDoubleValue(flags, 0), K_BACKWARD, false);

        return setBool(speedEncoder.setDoubleValue(flags, 0), K_FORWARD, false);
    }

    @Override
    public long flagsDefault( boolean forward, boolean backward )
    {
        long flags = super.flagsDefault(forward, backward);
        if (backward)
            return reverseSpeedEncoder.setDefaultValue(flags);

        return flags;
    }

    @Override
    public long setProperties( double speed, boolean forward, boolean backward )
    {
        long flags = super.setProperties(speed, forward, backward);
        if (backward)
            return setReverseSpeed(flags, speed);

        return flags;
    }

    @Override
    public long reverseFlags( long flags )
    {
        // swap access
        flags = super.reverseFlags(flags);

        // swap speeds
        double otherValue = reverseSpeedEncoder.getDoubleValue(flags);
        flags = setReverseSpeed(flags, speedEncoder.getDoubleValue(flags));
        return setSpeed(flags, otherValue);
    }

    /**
     * Adjusts the per-direction speeds of an edge according to its elevation profile.
     * Requires 3D way geometry (elevation import enabled).
     */
    @Override
    public void applyWayTags( OSMWay way, EdgeIteratorState edge )
    {
        PointList pl = edge.fetchWayGeometry(3);
        if (!pl.is3D())
            throw new IllegalStateException("To support speed calculation based on elevation data it is necessary to enable import of it.");

        long flags = edge.getFlags();

        if (way.hasTag("tunnel", "yes") || way.hasTag("bridge", "yes") || way.hasTag("highway", "steps"))
        {
            // do not change speed
            // note: although tunnel can have a difference in elevation it is very unlikely that the elevation data is correct for a tunnel
        } else
        {
            // Decrease the speed for ele increase (incline), and increase the speed for ele decrease (decline). The speed-decrease
            // has to be bigger (compared to the speed-increase) for the same elevation difference to simulate losing energy and avoiding hills.
            // For the reverse speed this has to be the opposite but again keeping in mind that up+down difference.
            double incEleSum = 0, incDist2DSum = 0;
            double decEleSum = 0, decDist2DSum = 0;
            // double prevLat = pl.getLatitude(0), prevLon = pl.getLongitude(0);
            double prevEle = pl.getElevation(0);
            double fullDist2D = edge.getDistance();

            if (Double.isInfinite(fullDist2D))
            {
                System.err.println("infinity distance? for way:" + way.getId());
                return;
            }

            // for short edges an incline makes no sense and for 0 distances could lead to NaN values for speed, see #432
            if (fullDist2D < 1)
                return;

            // only the start/end elevation delta is used — see the commented-out detailed loop below
            double eleDelta = pl.getElevation(pl.size() - 1) - prevEle;
            if (eleDelta > 0.1)
            {
                incEleSum = eleDelta;
                incDist2DSum = fullDist2D;
            } else if (eleDelta < -0.1)
            {
                decEleSum = -eleDelta;
                decDist2DSum = fullDist2D;
            }

//            // get a more detailed elevation information, but due to bad SRTM data this does not make sense now.
//            for (int i = 1; i < pl.size(); i++)
//            {
//                double lat = pl.getLatitude(i);
//                double lon = pl.getLongitude(i);
//                double ele = pl.getElevation(i);
//                double eleDelta = ele - prevEle;
//                double dist2D = distCalc.calcDist(prevLat, prevLon, lat, lon);
//                if (eleDelta > 0.1)
//                {
//                    incEleSum += eleDelta;
//                    incDist2DSum += dist2D;
//                } else if (eleDelta < -0.1)
//                {
//                    decEleSum += -eleDelta;
//                    decDist2DSum += dist2D;
//                }
//                fullDist2D += dist2D;
//                prevLat = lat;
//                prevLon = lon;
//                prevEle = ele;
//            }

            // Calculate slope via tan(asin(height/distance)) but for rather smallish angles where we can assume tan a=a and sin a=a.
            // Then calculate a factor which decreases or increases the speed.
            // Do this via a simple quadratic equation where y(0)=1 and y(0.3)=1/4 for incline and y(0.3)=2 for decline
            double fwdIncline = incDist2DSum > 1 ? incEleSum / incDist2DSum : 0;
            double fwdDecline = decDist2DSum > 1 ? decEleSum / decDist2DSum : 0;
            double restDist2D = fullDist2D - incDist2DSum - decDist2DSum;
            double maxSpeed = getHighwaySpeed("cycleway");
            if (isForward(flags))
            {
                // use weighted mean so that longer incline influences speed more than shorter
                double speed = getSpeed(flags);
                double fwdFaster = 1 + 2 * keepIn(fwdDecline, 0, 0.2);
                fwdFaster = fwdFaster * fwdFaster;
                double fwdSlower = 1 - 5 * keepIn(fwdIncline, 0, 0.2);
                fwdSlower = fwdSlower * fwdSlower;
                speed = speed * (fwdSlower * incDist2DSum + fwdFaster * decDist2DSum + 1 * restDist2D) / fullDist2D;
                flags = this.setSpeed(flags, keepIn(speed, PUSHING_SECTION_SPEED / 2, maxSpeed));
            }

            if (isBackward(flags))
            {
                // reverse direction: an incline forwards is a decline backwards
                double speedReverse = getReverseSpeed(flags);
                double bwFaster = 1 + 2 * keepIn(fwdIncline, 0, 0.2);
                bwFaster = bwFaster * bwFaster;
                double bwSlower = 1 - 5 * keepIn(fwdDecline, 0, 0.2);
                bwSlower = bwSlower * bwSlower;
                speedReverse = speedReverse * (bwFaster * incDist2DSum + bwSlower * decDist2DSum + 1 * restDist2D) / fullDist2D;
                flags = this.setReverseSpeed(flags, keepIn(speedReverse, PUSHING_SECTION_SPEED / 2, maxSpeed));
            }
        }
        edge.setFlags(flags);
    }

    @Override
    public String toString()
    {
        return "bike2";
    }
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.vcs.changes;

import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Couple;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.*;
import com.intellij.openapi.vcs.changes.ui.PlusMinusModify;
import com.intellij.openapi.vcs.history.VcsRevisionNumber;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.ThreeState;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import com.intellij.util.containers.OpenTHashSet;
import com.intellij.vcsUtil.VcsUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.File;
import java.util.*;

/**
 * Should work under an _external_ lock — callers are responsible for synchronization.
 * Just logic here: performs modifications to the group of change lists.
 */
public class ChangeListWorker implements ChangeListsWriteOperations {
  private final static Logger LOG = Logger.getInstance("#com.intellij.openapi.vcs.changes.ChangeListWorker");
  private final Project myProject;
  // list name -> list; in fact, a kind of local change
  private final Map<String, LocalChangeListImpl> myMap;
  private final DeletedFilesHolder myLocallyDeleted;
  private final SwitchedFileHolder mySwitchedHolder;
  private LocalChangeListImpl myDefault;

  private ChangeListsIndexes myIdx;
  private final ChangesDelta myDelta;
  // names of lists scheduled for removal once they become empty (see notifyDoneProcessingChanges)
  private final Set<String> myListsToDisappear;

  // snapshot of each list's changes taken at startProcessingChanges, consumed during the update cycle
  private final Map<LocalChangeListImpl, OpenTHashSet<Change>> myChangesBeforeUpdateMap = new HashMap<>();

  public ChangeListWorker(final Project project, final PlusMinusModify<BaseRevision> deltaListener) {
    myProject = project;
    myMap = new LinkedHashMap<>();
    myIdx = new ChangeListsIndexes();
    myLocallyDeleted = new DeletedFilesHolder();
    mySwitchedHolder = new SwitchedFileHolder(project, FileHolder.HolderType.SWITCHED);

    myDelta = new ChangesDelta(deltaListener);
    myListsToDisappear = ContainerUtil.newLinkedHashSet();
  }

  // Deep-copy constructor used by copy(); tries hard to end up with a non-null default list.
  private ChangeListWorker(final ChangeListWorker worker) {
    myProject = worker.myProject;
    myMap = new LinkedHashMap<>();
    myIdx = new ChangeListsIndexes(worker.myIdx);
    myLocallyDeleted = worker.myLocallyDeleted.copy();
    mySwitchedHolder = worker.mySwitchedHolder.copy();
    myDelta = worker.myDelta;
    myListsToDisappear = ContainerUtil.newLinkedHashSet(worker.myListsToDisappear);

    LocalChangeListImpl defaultList = null;
    for (LocalChangeListImpl changeList : worker.myMap.values()) {
      final LocalChangeListImpl copy = changeList.copy();

      final String changeListName = copy.getName();
      myMap.put(changeListName, copy);
      if (copy.isDefault()) {
        defaultList = copy;
      }
    }
    if (defaultList == null) {
      LOG.info("default list not found when copy");
      defaultList = myMap.get(worker.getDefaultListName());
    }

    if (defaultList == null) {
      LOG.info("default list not found when copy in original object too");
      if (!myMap.isEmpty()) {
        defaultList = myMap.values().iterator().next();
      }
      else {
        // can be when there's no vcs configured
        ///LOG.error("no changelists at all");
      }
    }
    myDefault = defaultList;
  }

  public void onAfterWorkerSwitch(@NotNull final ChangeListWorker previous) {
    checkForMultipleCopiesNotMove(myDelta.step(previous.myIdx, myIdx));
  }

  // If several lists claim a move/rename of the same source file, only the first (sorted) one
  // keeps the move; the others are downgraded to plain additions so the indexes stay consistent.
  private void checkForMultipleCopiesNotMove(boolean somethingChanged) {
    final MultiMap<FilePath, Pair<Change, String>> moves = new MultiMap<FilePath, Pair<Change, String>>() {
      @NotNull
      protected Collection<Pair<Change, String>> createCollection() {
        return new LinkedList<>();
      }
    };

    for (LocalChangeList changeList : myMap.values()) {
      final Collection<Change> changes = changeList.getChanges();
      for (Change change : changes) {
        if (change.isMoved() || change.isRenamed()) {
          moves.putValue(change.getBeforeRevision().getFile(), Pair.create(change, changeList.getName()));
        }
      }
    }
    for (FilePath filePath : moves.keySet()) {
      final List<Pair<Change, String>> copies = (List<Pair<Change, String>>)moves.get(filePath);
      if (copies.size() == 1) continue;
      copies.sort(MyChangesAfterRevisionComparator.getInstance());
      for (int i = 0; i < (copies.size() - 1); i++) {
        somethingChanged = true;
        final Pair<Change, String> item = copies.get(i);
        final Change oldChange = item.getFirst();
        final Change newChange = new Change(null, oldChange.getAfterRevision());

        final LocalChangeListImpl list = myMap.get(item.getSecond());
        list.removeChange(oldChange);
        list.addChange(newChange);

        final VcsKey key = myIdx.getVcsFor(oldChange);
        myIdx.changeRemoved(oldChange);
        myIdx.changeAdded(newChange, key);
      }
    }
    if (somethingChanged) {
      FileStatusManager.getInstance(myProject).fileStatusesChanged();
    }
  }

  public ChangeListWorker copy() {
    return new ChangeListWorker(this);
  }

  public boolean findListByName(@NotNull final String name) {
    return myMap.containsKey(name);
  }

  @Nullable
  public LocalChangeList getCopyByName(final String name) {
    return myMap.get(name);
  }

  @Nullable
  public LocalChangeList getChangeList(String id) {
    for (LocalChangeList changeList : myMap.values()) {
      if (changeList.getId().equals(id)) {
        return changeList.copy();
      }
    }
    return null;
  }

  /**
   * @return if a list with the given name exists, return the previous default list name,
   * or null if there was no previous default
   */
  @Nullable
  public String setDefault(final String name) {
    final LocalChangeListImpl newDefault = myMap.get(name);
    if (newDefault == null) {
      return null;
    }
    String previousName = null;
    if (myDefault != null) {
      myDefault.setDefault(false);
      previousName = myDefault.getName();
    }

    newDefault.setDefault(true);
    myDefault = newDefault;

    return previousName;
  }

  public boolean setReadOnly(final String name, final boolean value) {
    final LocalChangeList list = myMap.get(name);
    if (list != null) {
      list.setReadOnly(value);
    }
    return list != null;
  }

  public LocalChangeList addChangeList(@NotNull final String name, @Nullable final String comment, @Nullable Object data) {
    return addChangeList(null, name, comment, false, data);
  }

  LocalChangeList addChangeList(String id, @NotNull final String name, @Nullable final String description, final boolean inUpdate,
                                @Nullable Object data) {
    final boolean contains = myMap.containsKey(name);
    LOG.assertTrue(!contains, "Attempt to create duplicate changelist " + name);
    final LocalChangeListImpl newList = LocalChangeListImpl.createEmptyChangeListImpl(myProject, name, id);
    newList.setData(data);

    if (description != null) {
      newList.setComment(description);
    }
    myMap.put(name, newList);
    if (inUpdate) {
      // scope is not important: nothing had been added jet, nothing to move to "old state" members
      startProcessingChanges(newList, null); // this is executed only when use through GATE
    }
    return newList.copy();
  }

  public boolean addChangeToList(@NotNull final String name, final Change change, final VcsKey vcsKey) {
    LOG.debug("[addChangeToList] name: " + name + " change: " + ChangesUtil.getFilePath(change).getPath() + " vcs: " +
              (vcsKey == null ? null : vcsKey.getName()));
    final LocalChangeListImpl changeList = myMap.get(name);
    if (changeList != null) {
      changeList.addChange(change);
      myIdx.changeAdded(change, vcsKey);
    }
    return changeList != null;
  }

  // Puts the change back into whichever list held it before the update started;
  // falls back to the default list when no list claims it.
  public void addChangeToCorrespondingList(@NotNull Change change, final VcsKey vcsKey) {
    final String path = ChangesUtil.getFilePath(change).getPath();
    LOG.debug("[addChangeToCorrespondingList] for change " + path + " type: " + change.getType() +
              " have before revision: " + (change.getBeforeRevision() != null));
    assert myDefault != null;
    for (LocalChangeListImpl list : myMap.values()) {
      OpenTHashSet<Change> changesBeforeUpdate = myChangesBeforeUpdateMap.get(list);
      if (changesBeforeUpdate.contains(change)) {
        LOG.debug("[addChangeToCorrespondingList] matched: " + list.getName());
        list.addChange(change);
        myIdx.changeAdded(change, vcsKey);
        return;
      }
    }
    LOG.debug("[addChangeToCorrespondingList] added to default list");
    myDefault.addChange(change);
    myIdx.changeAdded(change, vcsKey);
  }

  // Removing a list moves its changes into the default list; the default list itself cannot be removed.
  public boolean removeChangeList(@NotNull String name) {
    final LocalChangeList list = myMap.get(name);
    if (list == null) {
      return false;
    }
    if (list.isDefault()) {
      throw new RuntimeException(new IncorrectOperationException("Cannot remove default changelist"));
    }
    final String listName = list.getName();

    for (Change change : list.getChanges()) {
      myDefault.addChange(change);
    }

    myMap.remove(listName);
    return true;
  }

  /**
   * Moves the given changes into the named list.
   * @return source-list -> moved-changes mapping, or null when the target list does not exist
   */
  @Nullable
  public MultiMap<LocalChangeList, Change> moveChangesTo(final String name, final Change[] changes) {
    final LocalChangeListImpl changeList = myMap.get(name);
    if (changeList != null) {
      final MultiMap<LocalChangeList, Change> result = new MultiMap<>();
      for (LocalChangeListImpl list : myMap.values()) {
        if (list.equals(changeList)) continue;
        for (Change change : changes) {
          final Change removedChange = list.removeChange(change);
          if (removedChange != null) {
            changeList.addChange(removedChange);
            result.putValue(list, removedChange);
          }
        }
      }
      return result;
    }
    return null;
  }

  public boolean editName(@NotNull final String fromName, @NotNull final String toName) {
    if (fromName.equals(toName)) return false;
    final LocalChangeListImpl list = myMap.get(fromName);
    final boolean canEdit = list != null && (!list.isReadOnly());
    if (canEdit) {
      list.setName(toName);
      myMap.remove(fromName);
      myMap.put(toName, list);
    }
    return canEdit;
  }

  /**
   * @return the previous comment of the list, or null when no such list exists
   */
  @Nullable
  public String editComment(@NotNull final String fromName, final String newComment) {
    final LocalChangeListImpl list = myMap.get(fromName);
    if (list != null) {
      final String oldComment = list.getComment();
      if (!Comparing.equal(oldComment, newComment)) {
        list.setComment(newComment);
      }
      return oldComment;
    }
    return null;
  }

  public boolean isEmpty() {
    return myMap.isEmpty();
  }

  @Nullable
  public LocalChangeList getDefaultListCopy() {
    return myDefault == null ? null : myDefault.copy();
  }

  public boolean isDefaultList(LocalChangeList list) {
    return myDefault != null && list.getId().equals(myDefault.getId());
  }

  public Project getProject() {
    return myProject;
  }

  // called NOT under ChangeListManagerImpl lock
  public void notifyStartProcessingChanges(final VcsModifiableDirtyScope scope) {
    final Collection<Change> oldChanges = new ArrayList<>();
    for (LocalChangeListImpl list : myMap.values()) {
      final Collection<Change> affectedChanges = startProcessingChanges(list, scope);
      if (!affectedChanges.isEmpty()) {
        oldChanges.addAll(affectedChanges);
      }
    }
    for (Change change : oldChanges) {
      myIdx.changeRemoved(change);
    }
    // scope should be modified for correct moves tracking
    correctScopeForMoves(scope, oldChanges);

    myLocallyDeleted.cleanAndAdjustScope(scope);
    mySwitchedHolder.cleanAndAdjustScope(scope);
  }

  // For moves/renames both the before and after paths must be dirty so both sides get refreshed.
  private static void correctScopeForMoves(final VcsModifiableDirtyScope scope, final Collection<Change> changes) {
    if (scope == null) return;
    for (Change change : changes) {
      if (change.isMoved() || change.isRenamed()) {
        scope.addDirtyFile(change.getBeforeRevision().getFile());
        scope.addDirtyFile(change.getAfterRevision().getFile());
      }
    }
  }

  // Fires removed/added/changed events in that order, then drops empty lists scheduled to disappear.
  public void notifyDoneProcessingChanges(final ChangeListListener dispatcher) {
    List<ChangeList> changedLists = new ArrayList<>();
    final Map<LocalChangeListImpl, List<Change>> removedChanges = new HashMap<>();
    final Map<LocalChangeListImpl, List<Change>> addedChanges = new HashMap<>();
    for (LocalChangeListImpl list : myMap.values()) {
      final List<Change> removed = new ArrayList<>();
      final List<Change> added = new ArrayList<>();
      if (doneProcessingChanges(list, removed, added)) {
        changedLists.add(list);
      }
      if (!removed.isEmpty()) {
        removedChanges.put(list, removed);
      }
      if (!added.isEmpty()) {
        addedChanges.put(list, added);
      }
    }
    for (Map.Entry<LocalChangeListImpl, List<Change>> entry : removedChanges.entrySet()) {
      dispatcher.changesRemoved(entry.getValue(), entry.getKey());
    }
    for (Map.Entry<LocalChangeListImpl, List<Change>> entry : addedChanges.entrySet()) {
      dispatcher.changesAdded(entry.getValue(), entry.getKey());
    }
    for (ChangeList changeList : changedLists) {
      dispatcher.changeListChanged(changeList);
    }

    for (String name : myListsToDisappear) {
      final LocalChangeList changeList = myMap.get(name);
      if ((changeList != null) && changeList.getChanges().isEmpty() && (!changeList.isReadOnly()) && (!changeList.isDefault())) {
        removeChangeList(name);
      }
    }
    myListsToDisappear.clear();

    myChangesBeforeUpdateMap.clear();
  }

  // Snapshots the list's changes and removes from the list those that belong to the dirty scope
  // (they will be re-added by the update); returns the removed ("old") changes.
  private Collection<Change> startProcessingChanges(@NotNull LocalChangeListImpl list, @Nullable final VcsDirtyScope scope) {
    OpenTHashSet<Change> changesBeforeUpdate = new OpenTHashSet<>(list.getChanges());
    myChangesBeforeUpdateMap.put(list, changesBeforeUpdate);

    final Collection<Change> result = new ArrayList<>();
    for (Change oldBoy : changesBeforeUpdate) {
      final ContentRevision before = oldBoy.getBeforeRevision();
      final ContentRevision after = oldBoy.getAfterRevision();
      if (scope == null ||
          before != null && scope.belongsTo(before.getFile()) ||
          after != null && scope.belongsTo(after.getFile()) ||
          isIgnoredChange(oldBoy, myProject)) {
        result.add(oldBoy);
        list.removeChange(oldBoy);
      }
    }
    return result;
  }

  private static boolean isIgnoredChange(@NotNull Change change, @NotNull Project project) {
    boolean beforeRevIgnored = change.getBeforeRevision() == null || isIgnoredRevision(change.getBeforeRevision(), project);
    boolean afterRevIgnored = change.getAfterRevision() == null || isIgnoredRevision(change.getAfterRevision(), project);
    return beforeRevIgnored && afterRevIgnored;
  }

  private static boolean isIgnoredRevision(final @NotNull ContentRevision revision, final @NotNull Project project) {
    return ReadAction.compute(() -> {
      if (project.isDisposed()) {
        return false;
      }
      VirtualFile vFile = revision.getFile().getVirtualFile();
      return vFile != null && ProjectLevelVcsManager.getInstance(project).isIgnored(vFile);
    });
  }

  // Diffs the list against its before-update snapshot; fills removed/added and reports whether anything changed.
  private boolean doneProcessingChanges(@NotNull LocalChangeListImpl list,
                                        List<Change> removedChanges,
                                        List<Change> addedChanges) {
    OpenTHashSet<Change> changesBeforeUpdate = myChangesBeforeUpdateMap.get(list);

    Set<Change> changes = list.getChanges();
    boolean changesDetected = (changes.size() != changesBeforeUpdate.size());

    for (Change newChange : changes) {
      Change oldChange = findOldChange(changesBeforeUpdate, newChange);
      if (oldChange == null) {
        addedChanges.add(newChange);
      }
    }
    changesDetected |= (!addedChanges.isEmpty());

    final List<Change> removed = new ArrayList<>(changesBeforeUpdate);
    // since there are SAME objects...
    removed.removeAll(changes);
    removedChanges.addAll(removed);
    changesDetected = changesDetected || (!removedChanges.isEmpty());

    return changesDetected;
  }

  // A change only counts as "the same" if the before revision and file status both match.
  @Nullable
  private static Change findOldChange(OpenTHashSet<Change> changesBeforeUpdate, Change newChange) {
    Change oldChange = changesBeforeUpdate.get(newChange);
    if (oldChange != null && sameBeforeRevision(oldChange, newChange) &&
        newChange.getFileStatus().equals(oldChange.getFileStatus())) {
      return oldChange;
    }
    return null;
  }

  private static boolean sameBeforeRevision(final Change change1, final Change change2) {
    final ContentRevision b1 = change1.getBeforeRevision();
    final ContentRevision b2 = change2.getBeforeRevision();
    if (b1 != null && b2 != null) {
      final VcsRevisionNumber rn1 = b1.getRevisionNumber();
      final VcsRevisionNumber rn2 = b2.getRevisionNumber();
      final boolean isBinary1 = (b1 instanceof BinaryContentRevision);
      final boolean isBinary2 = (b2 instanceof BinaryContentRevision);
      return rn1 != VcsRevisionNumber.NULL && rn2 != VcsRevisionNumber.NULL && rn1.compareTo(rn2) == 0 && isBinary1 == isBinary2;
    }
    return b1 == null && b2 == null;
  }

  @NotNull
  public List<LocalChangeList> getListsCopy() {
    final List<LocalChangeList> result = new ArrayList<>();
    for (LocalChangeList list : myMap.values()) {
      result.add(list.copy());
    }
    return result;
  }

  public String getDefaultListName() {
    return myDefault == null ?
null : myDefault.getName(); } public List<File> getAffectedPaths() { final SortedSet<FilePath> set = myIdx.getAffectedPaths(); final List<File> result = new ArrayList<>(set.size()); for (FilePath path : set) { result.add(path.getIOFile()); } return result; } @NotNull public List<VirtualFile> getAffectedFiles() { final Set<VirtualFile> result = ContainerUtil.newLinkedHashSet(); for (LocalChangeList list : myMap.values()) { for (Change change : list.getChanges()) { final ContentRevision before = change.getBeforeRevision(); final ContentRevision after = change.getAfterRevision(); if (before != null) { final VirtualFile file = before.getFile().getVirtualFile(); if (file != null) { result.add(file); } } if (after != null) { final VirtualFile file = after.getFile().getVirtualFile(); if (file != null) { result.add(file); } } } } return new ArrayList<>(result); } @Nullable public LocalChangeList getListCopy(@NotNull final VirtualFile file) { FilePath filePath = VcsUtil.getFilePath(file); for (LocalChangeList list : myMap.values()) { for (Change change : list.getChanges()) { if (change.getAfterRevision() != null && Comparing.equal(change.getAfterRevision().getFile(), filePath) || change.getBeforeRevision() != null && Comparing.equal(change.getBeforeRevision().getFile(), filePath)) { return list.copy(); } } } return null; } @Nullable public Change getChangeForPath(final FilePath file) { for (LocalChangeList list : myMap.values()) { for (Change change : list.getChanges()) { final ContentRevision afterRevision = change.getAfterRevision(); if (afterRevision != null && afterRevision.getFile().equals(file)) { return change; } final ContentRevision beforeRevision = change.getBeforeRevision(); if (beforeRevision != null && beforeRevision.getFile().equals(file)) { return change; } } } return null; } public FileStatus getStatus(final VirtualFile file) { return myIdx.getStatus(file); } public FileStatus getStatus(final FilePath file) { return myIdx.getStatus(file); } public 
DeletedFilesHolder getLocallyDeleted() { return myLocallyDeleted.copy(); } public SwitchedFileHolder getSwitchedHolder() { return mySwitchedHolder.copy(); } public void addSwitched(final VirtualFile file, @NotNull String branchName, final boolean recursive) { mySwitchedHolder.addFile(file, branchName, recursive); } public void removeSwitched(final VirtualFile file) { mySwitchedHolder.removeFile(file); } public String getBranchForFile(final VirtualFile file) { return mySwitchedHolder.getBranchForFile(file); } public boolean isSwitched(final VirtualFile file) { return mySwitchedHolder.containsFile(file); } public void addLocallyDeleted(final LocallyDeletedChange change) { myLocallyDeleted.addFile(change); } public boolean isContainedInLocallyDeleted(final FilePath filePath) { return myLocallyDeleted.isContainedInLocallyDeleted(filePath); } public void notifyVcsStarted(AbstractVcs vcs) { myLocallyDeleted.notifyVcsStarted(vcs); mySwitchedHolder.notifyVcsStarted(vcs); } public Collection<Change> getAllChanges() { final Collection<Change> changes = new HashSet<>(); for (LocalChangeList list : myMap.values()) { changes.addAll(list.getChanges()); } return changes; } public int getChangeListsNumber() { return myMap.size(); } private abstract class ExternalVsInternalChangesIntersection { protected final Collection<Change> myInChanges; protected final Map<Couple<String>, LocalChangeList> myInternalMap; protected final LocalChangeList myDefaultCopy; protected final Map<String, LocalChangeList> myIncludedListsCopies; protected ExternalVsInternalChangesIntersection(final Collection<Change> inChanges) { myInChanges = inChanges; myInternalMap = new HashMap<>(); myDefaultCopy = myDefault.copy(); myIncludedListsCopies = new HashMap<>(); } private Couple<String> keyForChange(final Change change) { final FilePath beforePath = ChangesUtil.getBeforePath(change); final String beforeKey = beforePath == null ? 
null : beforePath.getPath(); final FilePath afterPath = ChangesUtil.getAfterPath(change); final String afterKey = afterPath == null ? null : afterPath.getPath(); return Couple.of(beforeKey, afterKey); } private void preparation() { for (LocalChangeList list : myMap.values()) { final Collection<Change> managerChanges = list.getChanges(); final LocalChangeList copy = list.copy(); for (Change change : managerChanges) { myInternalMap.put(keyForChange(change), copy); } } } protected abstract void processInChange(final Couple<String> key, final Change change); public void run() { preparation(); for (Change change : myInChanges) { final Couple<String> key = keyForChange(change); processInChange(key, change); } } public Map<String, LocalChangeList> getIncludedListsCopies() { return myIncludedListsCopies; } } private class GatherChangesVsListsInfo extends ExternalVsInternalChangesIntersection { private final Map<String, List<Change>> myListToChangesMap; private GatherChangesVsListsInfo(final Collection<Change> inChanges) { super(inChanges); myListToChangesMap = new HashMap<>(); } protected void processInChange(Couple<String> key, Change change) { LocalChangeList tmpList = myInternalMap.get(key); if (tmpList == null) { tmpList = myDefaultCopy; } final String tmpName = tmpList.getName(); List<Change> list = myListToChangesMap.get(tmpName); if (list == null) { list = new ArrayList<>(); myListToChangesMap.put(tmpName, list); myIncludedListsCopies.put(tmpName, tmpList); } list.add(change); } public Map<String, List<Change>> getListToChangesMap() { return myListToChangesMap; } } private class GatherListsFilterValidChanges extends ExternalVsInternalChangesIntersection { private final List<Change> myValidChanges; private GatherListsFilterValidChanges(final Collection<Change> inChanges) { super(inChanges); myValidChanges = new ArrayList<>(); } protected void processInChange(Couple<String> key, Change change) { final LocalChangeList list = myInternalMap.get(key); if (list != null) { 
myIncludedListsCopies.put(list.getName(), list); myValidChanges.add(change); } } public List<Change> getValidChanges() { return myValidChanges; } } @NotNull public Map<String, List<Change>> listsForChanges(final Collection<Change> changes, final Map<String, LocalChangeList> lists) { final GatherChangesVsListsInfo info = new GatherChangesVsListsInfo(changes); info.run(); lists.putAll(info.getIncludedListsCopies()); return info.getListToChangesMap(); } @NotNull public Collection<LocalChangeList> getInvolvedListsFilterChanges(final Collection<Change> changes, final List<Change> validChanges) { final GatherListsFilterValidChanges worker = new GatherListsFilterValidChanges(changes); worker.run(); validChanges.addAll(worker.getValidChanges()); return worker.getIncludedListsCopies().values(); } @Nullable public LocalChangeList listForChange(final Change change) { for (LocalChangeList list : myMap.values()) { if (list.getChanges().contains(change)) return list.copy(); } return null; } @Nullable public String listNameIfOnlyOne(final @Nullable Change[] changes) { if (changes == null || changes.length == 0) { return null; } final Change first = changes[0]; for (LocalChangeList list : myMap.values()) { final Collection<Change> listChanges = list.getChanges(); if (listChanges.contains(first)) { // must contain all other for (int i = 1; i < changes.length; i++) { final Change change = changes[i]; if (!listChanges.contains(change)) { return null; } } return list.getName(); } } return null; } public ThreeState haveChangesUnder(@NotNull VirtualFile virtualFile) { FilePath dir = VcsUtil.getFilePath(virtualFile); FilePath changeCandidate = myIdx.getAffectedPaths().ceiling(dir); if (changeCandidate == null) { return ThreeState.NO; } return FileUtil.isAncestorThreeState(dir.getPath(), changeCandidate.getPath(), false); } @NotNull public Collection<Change> getChangesIn(final FilePath dirPath) { List<Change> changes = new ArrayList<>(); for (ChangeList list : myMap.values()) { for 
(Change change : list.getChanges()) { final ContentRevision afterRevision = change.getAfterRevision(); if (afterRevision != null && afterRevision.getFile().isUnder(dirPath, false)) { changes.add(change); continue; } final ContentRevision beforeRevision = change.getBeforeRevision(); if (beforeRevision != null && beforeRevision.getFile().isUnder(dirPath, false)) { changes.add(change); } } } return changes; } @Nullable VcsKey getVcsFor(@NotNull Change change) { return myIdx.getVcsFor(change); } void setListsToDisappear(final Collection<String> names) { myListsToDisappear.addAll(names); } @NotNull ChangeListManagerGate createSelfGate() { return new MyGate(this); } private static class MyGate implements ChangeListManagerGate { private final ChangeListWorker myWorker; private MyGate(final ChangeListWorker worker) { myWorker = worker; } @Override public List<LocalChangeList> getListsCopy() { return myWorker.getListsCopy(); } @Nullable @Override public LocalChangeList findChangeList(final String name) { return myWorker.getCopyByName(name); } @Override public LocalChangeList addChangeList(final String name, final String comment) { return myWorker.addChangeList(null, name, comment, true, null); } @Override public LocalChangeList findOrCreateList(final String name, final String comment) { LocalChangeList list = myWorker.getCopyByName(name); if (list == null) { list = addChangeList(name, comment); } return list; } @Override public void editComment(final String name, final String comment) { myWorker.editComment(name, comment); } @Override public void editName(String oldName, String newName) { myWorker.editName(oldName, newName); } @Override public void setListsToDisappear(final Collection<String> names) { myWorker.setListsToDisappear(names); } @Override public FileStatus getStatus(VirtualFile file) { return myWorker.getStatus(file); } @Deprecated @Override public FileStatus getStatus(File file) { return myWorker.getStatus(VcsUtil.getFilePath(file)); } @Override public 
FileStatus getStatus(@NotNull FilePath filePath) { return myWorker.getStatus(filePath); } @Override public void setDefaultChangeList(@NotNull String list) { myWorker.setDefault(list); } } public void removeRegisteredChangeFor(FilePath path) { myIdx.remove(path); for (LocalChangeListImpl list : myMap.values()) { for (Change change : list.getChanges()) { final ContentRevision afterRevision = change.getAfterRevision(); if (afterRevision != null && afterRevision.getFile().equals(path)) { list.removeChange(change); return; } final ContentRevision beforeRevision = change.getBeforeRevision(); if (beforeRevision != null && beforeRevision.getFile().equals(path)) { list.removeChange(change); return; } } } } // assumes after revisions are all not null private static class MyChangesAfterRevisionComparator implements Comparator<Pair<Change, String>> { private static final MyChangesAfterRevisionComparator ourInstance = new MyChangesAfterRevisionComparator(); public static MyChangesAfterRevisionComparator getInstance() { return ourInstance; } public int compare(final Pair<Change, String> o1, final Pair<Change, String> o2) { final String s1 = o1.getFirst().getAfterRevision().getFile().getPresentableUrl(); final String s2 = o2.getFirst().getAfterRevision().getFile().getPresentableUrl(); return SystemInfo.isFileSystemCaseSensitive ? s1.compareTo(s2) : s1.compareToIgnoreCase(s2); } } @Override public String toString() { return String.format("ChangeListWorker{myMap=%s}", StringUtil.join(myMap.values(), list -> { return String.format("list: %s changes: %s", list.getName(), StringUtil.join(list.getChanges(), ", ")); }, "\n")); } }
/* * Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.volley.toolbox; import com.android.volley.AuthFailureError; import com.android.volley.Request; import com.android.volley.Request.Method; import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.ProtocolVersion; import org.apache.http.StatusLine; import org.apache.http.entity.BasicHttpEntity; import org.apache.http.message.BasicHeader; import org.apache.http.message.BasicHttpResponse; import org.apache.http.message.BasicStatusLine; import java.io.DataOutputStream; import java.io.IOException; import java.io.InputStream; import java.net.HttpURLConnection; import java.net.URL; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import javax.net.ssl.HttpsURLConnection; import javax.net.ssl.SSLSocketFactory; /** * An {@link HttpStack} based on {@link HttpURLConnection}. */ public class HurlStack implements HttpStack { private static final String HEADER_CONTENT_TYPE = "Content-Type"; /** * An interface for transforming URLs before use. */ public interface UrlRewriter { /** * Returns a URL to use instead of the provided one, or null to indicate * this URL should not be used at all. 
*/ public String rewriteUrl(String originalUrl); } private final UrlRewriter mUrlRewriter; private final SSLSocketFactory mSslSocketFactory; public HurlStack() { this(null); } /** * @param urlRewriter Rewriter to use for request URLs */ public HurlStack(UrlRewriter urlRewriter) { this(urlRewriter, null); } /** * @param urlRewriter Rewriter to use for request URLs * @param sslSocketFactory SSL factory to use for HTTPS connections */ public HurlStack(UrlRewriter urlRewriter, SSLSocketFactory sslSocketFactory) { mUrlRewriter = urlRewriter; mSslSocketFactory = sslSocketFactory; } @Override public HttpResponse performRequest(Request<?> request, Map<String, String> additionalHeaders) throws IOException, AuthFailureError { String url = request.getUrl(); HashMap<String, String> map = new HashMap<String, String>(); map.putAll(request.getHeaders()); map.putAll(additionalHeaders); if (mUrlRewriter != null) { String rewritten = mUrlRewriter.rewriteUrl(url); if (rewritten == null) { throw new IOException("URL blocked by rewriter: " + url); } url = rewritten; } URL parsedUrl = new URL(url); HttpURLConnection connection = openConnection(parsedUrl, request); for (String headerName : map.keySet()) { connection.addRequestProperty(headerName, map.get(headerName)); } setConnectionParametersForRequest(connection, request); // Initialize HttpResponse with data from the HttpURLConnection. ProtocolVersion protocolVersion = new ProtocolVersion("HTTP", 1, 1); int responseCode = connection.getResponseCode(); if (responseCode == -1) { // -1 is returned by getResponseCode() if the response code could not be retrieved. // Signal to the caller that something was wrong with the connection. 
throw new IOException("Could not retrieve response code from HttpUrlConnection."); } StatusLine responseStatus = new BasicStatusLine(protocolVersion, connection.getResponseCode(), connection.getResponseMessage()); BasicHttpResponse response = new BasicHttpResponse(responseStatus); response.setEntity(entityFromConnection(connection)); for (Entry<String, List<String>> header : connection.getHeaderFields().entrySet()) { if (header.getKey() != null) { Header h = new BasicHeader(header.getKey(), header.getValue().get(0)); response.addHeader(h); } } return response; } /** * Initializes an {@link HttpEntity} from the given {@link HttpURLConnection}. * @param connection * @return an HttpEntity populated with data from <code>connection</code>. */ private static HttpEntity entityFromConnection(HttpURLConnection connection) { BasicHttpEntity entity = new BasicHttpEntity(); InputStream inputStream; try { inputStream = connection.getInputStream(); } catch (IOException ioe) { inputStream = connection.getErrorStream(); } entity.setContent(inputStream); entity.setContentLength(connection.getContentLength()); entity.setContentEncoding(connection.getContentEncoding()); entity.setContentType(connection.getContentType()); return entity; } /** * Create an {@link HttpURLConnection} for the specified {@code url}. */ protected HttpURLConnection createConnection(URL url) throws IOException { return (HttpURLConnection) url.openConnection(); } /** * Opens an {@link HttpURLConnection} with parameters. 
* @param url * @return an open connection * @throws IOException */ private HttpURLConnection openConnection(URL url, Request<?> request) throws IOException { HttpURLConnection connection = createConnection(url); int timeoutMs = request.getTimeoutMs(); connection.setConnectTimeout(timeoutMs); connection.setReadTimeout(timeoutMs); connection.setUseCaches(false); connection.setDoInput(true); // use caller-provided custom SslSocketFactory, if any, for HTTPS if ("https".equals(url.getProtocol()) && mSslSocketFactory != null) { ((HttpsURLConnection)connection).setSSLSocketFactory(mSslSocketFactory); } return connection; } @SuppressWarnings("deprecation") /* package */ static void setConnectionParametersForRequest(HttpURLConnection connection, Request<?> request) throws IOException, AuthFailureError { switch (request.getMethod()) { case Method.DEPRECATED_GET_OR_POST: // This is the deprecated way that needs to be handled for backwards compatibility. // If the request's post body is null, then the assumption is that the request is // GET. Otherwise, it is assumed that the request is a POST. byte[] postBody = request.getPostBody(); if (postBody != null) { // Prepare output. There is no need to set Content-Length explicitly, // since this is handled by HttpURLConnection using the size of the prepared // output stream. connection.setDoOutput(true); connection.setRequestMethod("POST"); connection.addRequestProperty(HEADER_CONTENT_TYPE, request.getPostBodyContentType()); DataOutputStream out = new DataOutputStream(connection.getOutputStream()); out.write(postBody); out.close(); } break; case Method.GET: // Not necessary to set the request method because connection defaults to GET but // being explicit here. 
connection.setRequestMethod("GET"); break; case Method.DELETE: connection.setRequestMethod("DELETE"); break; case Method.POST: connection.setRequestMethod("POST"); addBodyIfExists(connection, request); break; case Method.PUT: connection.setRequestMethod("PUT"); addBodyIfExists(connection, request); break; case Method.HEAD: connection.setRequestMethod("HEAD"); break; case Method.OPTIONS: connection.setRequestMethod("OPTIONS"); break; case Method.TRACE: connection.setRequestMethod("TRACE"); break; case Method.PATCH: connection.setRequestMethod("PATCH"); addBodyIfExists(connection, request); break; default: throw new IllegalStateException("Unknown method type."); } } private static void addBodyIfExists(HttpURLConnection connection, Request<?> request) throws IOException, AuthFailureError { byte[] body = request.getBody(); if (body != null) { connection.setDoOutput(true); connection.addRequestProperty(HEADER_CONTENT_TYPE, request.getBodyContentType()); DataOutputStream out = new DataOutputStream(connection.getOutputStream()); out.write(body); out.close(); } } }
package com.fourinone; import java.util.ArrayList; import java.util.List; import java.io.Serializable; import java.rmi.RemoteException; //import java.rmi.ConnectException; final public class ParkProxy{ private static String sid = null; private Park pk; private ParkLeader pl = null; public ParkProxy(String host, int port, String sn) { pl = new ParkLeader(host,port,sn); pk = pl.getLeaderPark(); init(); } public ParkProxy(String host, int port, String[][] servers, String sn)//all server host and port:string[][] { pl = new ParkLeader(host,port,servers,sn); pk = pl.getLeaderPark();//(Park)BeanService.getBean(host,port,"ParkService");//try change pk if catch exception //new ParkLeader(host,port,String[][]) //pl.getMasterPark(){catch remoteexception and try next until get one}; init(); } // private class ObjectBeanProxy implements ObjectBean{ // private Object obj; // private Long vid; // private String name; // private ObjectBeanProxy(){} /*private ObjectBeanProxy(ObjValue ov, String domainnodekey){ vid = (Long)ov.getObj(domainnodekey+"._me_ta.version"); obj = ov.get(domainnodekey); name = domainnodekey; }*/ //@Delegate(interfaceName="com.fourinone.ObjectBean",methodName="toObject",policy=DelegatePolicy.Implements) // public Object toObject(){ // return obj; // } //@Delegate(interfaceName="com.fourinone.ObjectBean",methodName="getName",policy=DelegatePolicy.Implements) // public String getName(){ // return name; // } // public String toString(){ // return name+":"+obj.toString(); // } /*@Delegate(interfaceName="com.fourinone.ObjectVersion",methodName="getVid",policy=DelegatePolicy.Implements) public Long getVid(){ return vid; }*/ // } // private class ObjectBeanList<E> extends ArrayList implements List{ // private Long vid; // } private void init(){ try{ if(sid==null) sid = pk.getSessionId(); }catch(Exception e){ //e.printStackTrace(); LogUtil.info("[Park]", "[init]", e.toString()); } } 
@Delegate(interfaceName="com.fourinone.ParkLocal",methodName="create",policy=DelegatePolicy.Implements) public ObjectBean create(String domain, Serializable obj){ return put(domain, System.nanoTime()+"", obj); } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="create",policy=DelegatePolicy.Implements) public ObjectBean put(String domain, String node, Serializable obj){ return put(domain, node, obj, AuthPolicy.OP_ALL); } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="create",policy=DelegatePolicy.Implements) public ObjectBean put(String domain, String node, Serializable obj, AuthPolicy auth){ return put(domain, node, obj, auth, false); } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="create",policy=DelegatePolicy.Implements) public ObjectBean create(String domain, String node, Serializable obj, boolean heartbeat){ return put(domain, node, obj, AuthPolicy.OP_ALL, heartbeat); } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="create",policy=DelegatePolicy.Implements) public ObjectBean put(String domain, String node, Serializable obj, AuthPolicy auth, boolean heartbeat){ return put(domain, node, obj, auth, heartbeat, 0); } public ObjectBean put(String domain, String node, Serializable obj, AuthPolicy auth, boolean heartbeat, int i) { ObjectBean ob=null; if(ParkObjValue.checkGrammar(domain, node, obj)){ try{ ObjValue ov = pk.create(domain, node, ObjectBytes.toBytes(obj), sid, auth.getPolicy(), heartbeat); ob = OvToBean(ov,domain,node); if(ob!=null&&heartbeat) HbDaemo.runPutTask(pk, pl, domain, node, sid); //System.out.println("created..."); }catch(Exception e){ //e.printStackTrace(); LogUtil.info("[Park]", "[put]", e.getMessage()); if(e instanceof RemoteException){ //if(i<pl.groupserver.length) //{ pk = pl.getNextLeader(); if(pk!=null) ob = put(domain, node, obj, auth, heartbeat, i+1); //} } //if(e=LeaderException or java.rmi.ConnectException) //{pk=getNextMaster;ob = put(...); if(e instanceof 
ClosetoOverException){ LogUtil.info("[Park]", "[put]", ((ClosetoOverException)e).print()); } } } return ob; } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="update",policy=DelegatePolicy.Implements) public ObjectBean update(String domain, String node, Serializable obj){ return update(domain, node, obj, 0); } public ObjectBean update(String domain, String node, Serializable obj, int i) { ObjectBean ob=null; if(ParkObjValue.checkGrammar(domain, node, obj)){ try{ ObjValue ov = pk.update(domain, node, ObjectBytes.toBytes(obj), sid); ob = OvToBean(ov, domain,node); }catch(Exception e){ LogUtil.info("[Park]", "[update]", e.getMessage()); //e.printStackTrace(); if(e instanceof RemoteException){ //if(i<pl.groupserver.length) //{ pk = pl.getNextLeader(); if(pk!=null) ob = update(domain, node, obj,i+1); //} } if(e instanceof ClosetoOverException){ LogUtil.info("[Park]", "[update]", ((ClosetoOverException)e).print()); } } } return ob; } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="get",policy=DelegatePolicy.Implements) public ObjectBean get(String domain, String node){ return get(domain, node, 0); } public ObjectBean get(String domain, String node, int i) { ObjectBean ob=null; if(ParkObjValue.checkGrammar(domain, node)){ try{ ObjValue ov = pk.get(domain, node, sid);//getTestObj(); //ob = new ObjectBeanProxy(ov, domain,node); ob = OvToBean(ov, domain,node); }catch(Exception e){ LogUtil.info("[Park]", "[get]", e.getMessage()); if(e instanceof RemoteException){ //if(i<pl.groupserver.length) //{ pk = pl.getNextLeader(); if(pk!=null) ob = get(domain, node, i+1); //} } } } return ob; } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="getLastest",policy=DelegatePolicy.Implements) public ObjectBean getLastest(String domain, String node, ObjectBean obold){ return getLastest(domain, node, obold, 0); } public ObjectBean getLastest(String domain, String node, ObjectBean obold, int i){ ObjectBean ob=null; if(ParkObjValue.checkGrammar(domain, 
node)){ try{ long vid = obold!=null?((ObjectBeanProxy)obold).vid:0l;//ObjectVersion //System.out.println("ob.vid:"+vid); ObjValue ov = pk.getLastest(domain, node, sid, vid); //System.out.println(ov); ob = OvToBean(ov, domain,node); //System.out.println(ob); }catch(Exception e){ LogUtil.info("[Park]", "[getLastest]", e.getMessage()); if(e instanceof RemoteException){ //if(i<pl.groupserver.length) //{ pk = pl.getNextLeader(); if(pk!=null) ob = getLastest(domain, node, obold, i+1); //} } } } return ob; } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="get",policy=DelegatePolicy.Implements) public List<ObjectBean> getNodes(String domain){ return getNodes(domain, 0); } public List<ObjectBean> getNodes(String domain, int i) { List<ObjectBean> objlist = null; if(ParkObjValue.checkGrammar(domain)){ try{ ObjValue ov = pk.get(domain, null, sid);//getTestObj(); objlist = OvToBeanList(ov, domain); }catch(Exception e){ LogUtil.info("[Park]", "[getNodes]", e.getMessage()); if(e instanceof RemoteException){ //if(i<pl.groupserver.length) //{ pk = pl.getNextLeader(); if(pk!=null) objlist = getNodes(domain, i+1); //} } if(e instanceof ClosetoOverException){ LogUtil.info("[Park]", "[getNodes]", ((ClosetoOverException)e).print()); } } } return objlist; } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="getLastest",policy=DelegatePolicy.Implements) public List<ObjectBean> getNodesLastest(String domain, List<ObjectBean> oblist){ return getNodesLastest(domain, oblist, 0); } public List<ObjectBean> getNodesLastest(String domain, List<ObjectBean> oblist, int i){ List<ObjectBean> objlist = null; if(ParkObjValue.checkGrammar(domain)){ try{ long vid = oblist!=null?((ObjectBeanList)oblist).vid:0l; ObjValue ov = pk.getLastest(domain, null, sid, vid); //System.out.println("getNodesLastest:"+ov); objlist = OvToBeanList(ov, domain); }catch(Exception e){ LogUtil.info("[Park]", "[getNodesLastest]", e.getMessage()); if(e instanceof RemoteException){ 
//if(i<pl.groupserver.length) //{ pk = pl.getNextLeader(); if(pk!=null) objlist = getNodesLastest(domain, oblist, i+1); //} } if(e instanceof ClosetoOverException){ LogUtil.info("[Park]", "[getNodesLastest]", ((ClosetoOverException)e).print()); } } } return objlist; } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="delete",policy=DelegatePolicy.Implements) public ObjectBean remove(String domain, String node) { return remove(domain, node, 0); } public ObjectBean remove(String domain, String node, int i) { ObjectBean ob=null; //System.out.println("remove(String domain, String node):"+domain); if(ParkObjValue.checkGrammar(domain,node)){ try{ ObjValue ov = pk.delete(domain, node, sid); ob = OvToBean(ov, domain,node); }catch(Exception e){ //e.printStackTrace(); LogUtil.info("[Park]", "[delete]", e.getMessage()); if(e instanceof RemoteException){ //if(i<pl.groupserver.length) //{ pk = pl.getNextLeader(); if(pk!=null) ob = remove(domain, node, i+1); //} } } } return ob; } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="delete",policy=DelegatePolicy.Implements) public List<ObjectBean> remove(String domain){ return remove(domain,0); } public List<ObjectBean> remove(String domain, int i) { List<ObjectBean> objlist = null; if(ParkObjValue.checkGrammar(domain)){ try{ ObjValue ov = pk.delete(domain, null, sid); //System.out.println(ov); objlist = OvToBeanList(ov, domain); }catch(Exception e){ //e.printStackTrace(); LogUtil.info("[Park]", "[delete]", e.getMessage()); if(e instanceof RemoteException){//ConnectException //if(i<pl.groupserver.length) //{ pk = pl.getNextLeader(); if(pk!=null) objlist = remove(domain,i+1); //} } if(e instanceof ClosetoOverException){ LogUtil.info("[Park]", "[delete]", ((ClosetoOverException)e).print()); } } } return objlist; } @Delegate(interfaceName="com.fourinone.ParkLocal",methodName="setDeletable",policy=DelegatePolicy.Implements) public boolean updateDomainAuth(String domain){ return updateDomainAuth(domain,0); } 
// Worker overload for updateDomainAuth; i is the failover retry counter.
// NOTE(review): like the other retry paths in this class, i is incremented but
// never bounded, so a persistent RemoteException retries indefinitely.
public boolean updateDomainAuth(String domain, int i){
    boolean setflag = false;
    if(ParkObjValue.checkGrammar(domain)){
        try{
            // Push the OP_ALL policy for this domain to the Park leader.
            setflag = pk.update(domain, AuthPolicy.OP_ALL.getPolicy(), sid);
        }catch(Exception e){
            LogUtil.info("[Park]", "[setDeletable]", e.getMessage());
            if(e instanceof RemoteException){//ConnectException
                pk = pl.getNextLeader();
                if(pk!=null)
                    setflag = updateDomainAuth(domain,i+1);
            }
        }
    }
    return setflag;
}

/**
 * Registers a callback fired each time the watched node's value changes.
 * The watch loop runs asynchronously and keeps re-polling getLastest until the
 * listener's happenLastest(...) returns true.  Exposed as
 * ParkLocal.addLastestListener.
 *
 * @param domain domain of the watched node
 * @param node   node name to watch
 * @param ob     last bean already seen by the caller (may be null)
 * @param liser  listener receiving LastestEvent notifications
 */
@Delegate(interfaceName="com.fourinone.ParkLocal",methodName="addLastestListener",policy=DelegatePolicy.Implements)
public void addLastestListener(String domain, String node, ObjectBean ob, LastestListener liser)
{
    // Capture parameters as finals for the anonymous async task below.
    final String dm = domain;
    final String nd = node;
    final ObjectBean oob = ob;
    final LastestListener lis = liser;
    new AsyncExector(){
        public void task(){
            try{
                /*ObjectBean newob = null;
                while((newob=getLastest(dm, nd, oldob))==null);
                LogUtil.fine("[Park]","[Trim LastestEvent]","[obj]");
                LastestEvent le = new LastestEvent(newob);
                lis.happenLastest(le);*/
                ObjectBean oldob = oob;
                while(true){
                    // getLastest returns null on failure; a null simply loops again.
                    ObjectBean newob = getLastest(dm, nd, oldob);
                    if(newob!=null){
                        LogUtil.fine("[Park]","[Trim LastestEvent]","[obj]");
                        LastestEvent le = new LastestEvent(newob);
                        // A listener returning true means "stop watching".
                        if(lis.happenLastest(le))
                            break;
                        // Keep watching from the bean the listener last saw.
                        oldob = (ObjectBean)le.getSource();
                    }
                }
            }catch(Exception e){
                LogUtil.info("[Park]","[addLastestListener]",e);
            }
        }
    }.run();//ScheduledExecutorService or Thread.sleep(1) but time delay, so add time param
}

/**
 * List flavour of addLastestListener: watches the whole node set of a domain
 * and notifies the listener whenever the set (or any member) changes.
 */
@Delegate(interfaceName="com.fourinone.ParkLocal",methodName="addLastestListener",policy=DelegatePolicy.Implements)
public void addLastestListener(String domain, List<ObjectBean> oblist, LastestListener liser)
{
    final String dm = domain;
    final List<ObjectBean> ols = oblist;
    final LastestListener lis = liser;
    new AsyncExector(){
        public void task(){
            try{
                /*List<ObjectBean> newls = null;
                while((newls=getNodesLastest(dm, oldls))==null);
                LogUtil.fine("[Park]","[Trim LastestEvent]","[list]");
                LastestEvent le = new LastestEvent(newls);
                lis.happenLastest(le);*/
                List<ObjectBean> oldls = ols;
                while(true){
                    List<ObjectBean> newls = getNodesLastest(dm, oldls);
                    if(newls!=null){
                        LogUtil.fine("[Park]","[Trim LastestEvent]","[list]");
                        LastestEvent le = new LastestEvent(newls);
                        if(lis.happenLastest(le))
                            break;
                        oldls = (List<ObjectBean>)le.getSource();
                    }
                }
            }catch(Exception e){
                LogUtil.info("[Park]","[addLastestListener]",e);
            }
        }
    }.run();
}

/**
 * Converts a raw ObjValue holding a single node's data into an ObjectBean proxy.
 *
 * @return the populated proxy, or null when ov is null or empty
 */
public ObjectBean OvToBean(ObjValue ov, String domain, String node){
    if(ov!=null&&!ov.isEmpty())
    {
        //System.out.println("OvToBean:"+ov);
        ObjectBeanProxy obp = new ObjectBeanProxy();
        //ObjectBean ob = (ObjectBean)DelegateConsole.bind(new Class[]{ObjectBean.class,ObjectVersion.class}, new ObjectBeanProxy(ov, domainnodekey));
        //ObjectBeanProxy obp = (ObjectBeanProxy)ob;
        String domainnodekey = ParkObjValue.getDomainnodekey(domain, node);
        // The version id is stored under the node's meta key (ParkMeta.getYBB),
        // the serialized payload under the plain domain/node key.
        obp.vid = (Long)ov.getObj(ParkMeta.getYBB(domainnodekey));
        obp.obj = ObjectBytes.toObject((byte[])ov.get(domainnodekey));
        obp.name = domainnodekey;
        return obp;
    }else
        return null;
}

/**
 * Converts a raw ObjValue describing a whole domain into an ObjectBeanList,
 * carrying the domain-level version id plus one proxy per child node.
 *
 * @return the list, or null when ov is null or empty
 */
public List<ObjectBean> OvToBeanList(ObjValue ov, String domain){
    if(ov!=null&&!ov.isEmpty())
    {
        ObjectBeanList<ObjectBean> objlist = new ObjectBeanList<ObjectBean>();
        // Domain-level version id.
        objlist.vid = (Long)ov.getObj(ParkMeta.getYBB(domain));
        // Wildcard query for every per-node version entry under the domain.
        ObjValue nodeversion = ov.getWidely(ParkMeta.getYBB(domain+"..*"));
        ArrayList<String> nvnames = nodeversion.getObjNames();
        for(String nvname:nvnames){
            ObjectBeanProxy obp = new ObjectBeanProxy();
            obp.vid = (Long)nodeversion.getObj(nvname);
            // Strip the meta suffix to recover the plain domain/node key.
            obp.name = nvname.substring(0,nvname.indexOf(ParkMeta.getYSJ()));
            obp.obj = ObjectBytes.toObject((byte[])ov.getObj(obp.name));
            //ObjectBean ob = (ObjectBean)DelegateConsole.bind(new Class[]{ObjectBean.class,ObjectVersion.class}, obp);
            objlist.add(obp);
        }
        return objlist;
    }else
        return null;
}

/*
private ObjValue getTestObj(){
    ObjValue ov = new ObjValue();
    ov.set("d","2");
    ov.setObj("d._me_ta.version",11l);
    ov.set("d.n","aaa");
    ov.setObj("d.n._me_ta.version",111l);
    ov.set("d.m","bbb");
    ov.setObj("d.m._me_ta.version",222l);
    return ov;
}
*/
/**
 * Ad-hoc manual test entry point: creates a node via the delegated ParkLocal
 * using command-line arguments (args[0]=domain, args[1]=node, args[2]=value).
 * The large commented sections are earlier manual experiments kept for reference.
 */
public static void main(String[] args){
    try{
        //Park pk = (Park)BeanService.getBean("localhost",1888,"ParkService");
        //System.out.println(pk.put(args[0], args[1], args[2], sid));
        //ParkProxy pp = new ParkProxy();
        //ParkLocal pp = DelegateHandle.bind(ParkLocal.class, ParkProxy.class);
        ParkLocal pp = BeanContext.getPark();
        // NOTE(review): throws ArrayIndexOutOfBoundsException when fewer than
        // three command-line arguments are supplied.
        pp.create(args[0],args[1],args[2],AuthPolicy.OP_ALL,true);
        //pp.create("d","m","b",true);
        //pp.create("d","x","c",true);
        /*pp.create("d","n","a");
        pp.create("d","m","b");
        pp.create("d","x","c");
        pp.create("d","y","d");
        //System.out.println("ob_put:"+ob_put.toObject());
        ObjectBean ob_get = pp.get("d","g");
        System.out.println("ob_get:"+ob_get);
        if(ob_get!=null){
            System.out.println("ob_get.toObject:"+ob_get.toObject());
            System.out.println("obp.vid:"+((ObjectBeanProxy)ob_get).vid);
        }
        List<ObjectBean> oblist = pp.get("d");
        for(ObjectBean obean:oblist)
            System.out.println("obean:"+obean.getName());
        System.out.println(pp.getLastest("d","n",ob_get));
        System.out.println(pp.getLastest("d",oblist));
        System.out.println(pp.delete("d","n"));
        System.out.println(pp.delete("d"));
        */
        /*
        ObjectBean ob = pp.get("d","n");
        System.out.println("ob:"+ob.toObject());
        System.out.println("ob:"+ob.getName());
        ObjectBeanProxy obp = (ObjectBeanProxy)ob;
        System.out.println("obp:"+obp.vid);
        //System.out.println(pp.getLastest("d","n",null));
        List<ObjectBean> lob = pp.getNodes(null);//"d"
        System.out.println("lob:"+lob);
        //for(ObjectBean obean:lob)
        //System.out.println("obean:"+obean.getName());
        */
    }catch(Exception e){
        e.printStackTrace();
    }
}
}
package org.sakaiproject.gradebookng.tool.panels.importExport;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.math.NumberUtils;
import org.apache.wicket.Component;
import org.apache.wicket.behavior.Behavior;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.Button;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.list.ListItem;
import org.apache.wicket.markup.html.list.ListView;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.ResourceModel;
import org.apache.wicket.spring.injection.annot.SpringBean;
import org.sakaiproject.gradebookng.business.GradeSaveResponse;
import org.sakaiproject.gradebookng.business.GradebookNgBusinessService;
import org.sakaiproject.gradebookng.business.model.ProcessedGradeItem;
import org.sakaiproject.gradebookng.business.model.ProcessedGradeItemDetail;
import org.sakaiproject.gradebookng.business.model.ProcessedGradeItemStatus;
import org.sakaiproject.gradebookng.business.util.MessageHelper;
import org.sakaiproject.gradebookng.tool.model.ImportWizardModel;
import org.sakaiproject.gradebookng.tool.pages.GradebookPage;
import org.sakaiproject.gradebookng.tool.pages.ImportExportPage;
import org.sakaiproject.service.gradebook.shared.Assignment;
import org.sakaiproject.service.gradebook.shared.AssignmentHasIllegalPointsException;
import org.sakaiproject.service.gradebook.shared.ConflictingAssignmentNameException;
import org.sakaiproject.service.gradebook.shared.ConflictingExternalIdException;

import lombok.extern.slf4j.Slf4j;

/**
 * Confirmation page for what is going to be imported.
 *
 * On submit it (1) creates any new gradebook items, (2) applies point-value
 * modifications, and (3) saves the imported grades/comments; any failure flags
 * an error and suppresses the success redirect.
 */
@Slf4j
public class GradeImportConfirmationStep extends Panel {

	private static final long serialVersionUID = 1L;

	@SpringBean(name = "org.sakaiproject.gradebookng.business.GradebookNgBusinessService")
	protected GradebookNgBusinessService businessService;

	/** Wicket id this panel was added under; reused when swapping wizard steps. */
	private final String panelId;

	/** Wizard state accumulated by the previous steps. */
	private final IModel<ImportWizardModel> model;

	public GradeImportConfirmationStep(final String id, final IModel<ImportWizardModel> importWizardModel) {
		super(id);
		this.panelId = id;
		this.model = importWizardModel;
	}

	@Override
	public void onInitialize() {
		super.onInitialize();

		// unpack model
		final ImportWizardModel importWizardModel = this.model.getObject();

		final List<ProcessedGradeItem> itemsToCreate = importWizardModel.getItemsToCreate();
		final List<ProcessedGradeItem> itemsToUpdate = importWizardModel.getItemsToUpdate();
		final List<ProcessedGradeItem> itemsToModify = importWizardModel.getItemsToModify();
		final List<Assignment> assignmentsToCreate = importWizardModel.getAssignmentsToCreate();

		final Form<?> form = new Form<Void>("form") {
			private static final long serialVersionUID = 1L;

			/** Set as soon as any create/modify/save step fails; gates later work. */
			boolean errors = false;

			@Override
			protected void onSubmit() {

				// Maps trimmed assignment title -> id, for resolving newly created items later.
				final Map<String, Long> assignmentMap = new HashMap<>();

				// Create new GB items
				assignmentsToCreate.forEach(assignment -> {
					Long assignmentId = null;
					try {
						assignmentId = GradeImportConfirmationStep.this.businessService.addAssignment(assignment);
					} catch (final AssignmentHasIllegalPointsException e) {
						getSession().error(new ResourceModel("error.addgradeitem.points").getObject());
						this.errors = true;
					} catch (final ConflictingAssignmentNameException e) {
						getSession().error(new ResourceModel("error.addgradeitem.title").getObject());
						this.errors = true;
					} catch (final ConflictingExternalIdException e) {
						getSession().error(new ResourceModel("error.addgradeitem.exception").getObject());
						this.errors = true;
					} catch (final Exception e) {
						getSession().error(new ResourceModel("error.addgradeitem.exception").getObject());
						this.errors = true;
					}
					assignmentMap.put(StringUtils.trim(assignment.getName()), assignmentId);
				});

				// Modify any that need modification
				itemsToModify.forEach(item -> {
					final Double points = NumberUtils.toDouble(item.getItemPointValue());
					final Assignment assignment = GradeImportConfirmationStep.this.businessService.getAssignment(item.getItemTitle());

					// Guard against a missing assignment (e.g. deleted concurrently);
					// previously this NPE'd during submit.
					if (assignment == null) {
						log.warn("Assignment '{}' could not be found for points modification", item.getItemTitle());
						getSession().error(MessageHelper.getString("importExport.error.pointsmodification", item.getItemTitle()));
						this.errors = true;
						return;
					}

					assignment.setPoints(points);
					final boolean updated = GradeImportConfirmationStep.this.businessService.updateAssignment(assignment);
					if (!updated) {
						getSession().error(MessageHelper.getString("importExport.error.pointsmodification", assignment.getName()));
						this.errors = true;
					}

					assignmentMap.put(StringUtils.trim(assignment.getName()), assignment.getId());
				});

				// add/update the data
				if (!this.errors) {

					final List<ProcessedGradeItem> itemsToSave = new ArrayList<>();
					itemsToSave.addAll(itemsToUpdate);
					itemsToSave.addAll(itemsToCreate);
					itemsToSave.addAll(itemsToModify);

					itemsToSave.forEach(processedGradeItem -> {
						log.debug("Looping through items to save");

						final List<ProcessedGradeItemDetail> processedGradeItemDetails = processedGradeItem.getProcessedGradeItemDetails();

						processedGradeItemDetails.forEach(processedGradeItemDetail -> {
							log.debug("Looping through detail items to save");

							// get data
							// if its an update/modify, this will get the id
							Long assignmentId = processedGradeItem.getItemId();

							// if assignment title was modified, we need to use that instead
							final String assignmentTitle = StringUtils.trim((processedGradeItem.getAssignmentTitle() != null)
									? processedGradeItem.getAssignmentTitle() : processedGradeItem.getItemTitle());

							// a newly created assignment will have a null ID here and need a lookup from the map to get the ID
							if (assignmentId == null) {
								assignmentId = assignmentMap.get(assignmentTitle);
							}

							// if the id still cannot be resolved, flag it rather than passing null downstream
							if (assignmentId == null) {
								log.warn("Could not resolve assignment id for '{}', skipping grade save", assignmentTitle);
								getSession().error(new ResourceModel("importExport.error.grade").getObject());
								this.errors = true;
								return;
							}

							final GradeSaveResponse saveResponse = GradeImportConfirmationStep.this.businessService.saveGrade(assignmentId,
									processedGradeItemDetail.getStudentUuid(),
									processedGradeItemDetail.getGrade(), processedGradeItemDetail.getComment());

							// handle the response types
							switch (saveResponse) {
								case OK:
									// sweet
									break;
								case OVER_LIMIT:
									// no worries!
									break;
								case NO_CHANGE: {
									// Grade unchanged: try to save just the comments
									final String currentComment = StringUtils.trimToNull(
											GradeImportConfirmationStep.this.businessService.getAssignmentGradeComment(assignmentId,
													processedGradeItemDetail.getStudentUuid()));
									final String newComment = StringUtils.trimToNull(processedGradeItemDetail.getComment());

									if (!StringUtils.equals(currentComment, newComment)) {
										final boolean success = GradeImportConfirmationStep.this.businessService.updateAssignmentGradeComment(assignmentId,
												processedGradeItemDetail.getStudentUuid(), newComment);
										log.info("Saving comment: {}, {}, {}, {}", success, assignmentId,
												processedGradeItemDetail.getStudentEid(), processedGradeItemDetail.getComment());
										if (!success) {
											getSession().error(new ResourceModel("importExport.error.comment").getObject());
											this.errors = true;
										}
									}
									break;
								}
								case CONCURRENT_EDIT:
									// this will be handled eventually
									break;
								case ERROR:
									// uh oh
									getSession().error(new ResourceModel("importExport.error.grade").getObject());
									this.errors = true;
									break;
								default:
									break;
							}

							log.info("Saving grade for assignment id: {}, student: {}, grade: {}, comment: {}, status: {}",
									assignmentId, processedGradeItemDetail.getStudentEid(), processedGradeItemDetail.getGrade(),
									processedGradeItemDetail.getComment(), saveResponse);
						});
					});
				}

				if (!this.errors) {
					getSession().success(getString("importExport.confirmation.success"));
					setResponsePage(GradebookPage.class);
				}
				// auto refresh will render the errors
			}
		};
		add(form);

		// back button
		final Button backButton = new Button("backbutton") {
			private static final long serialVersionUID = 1L;

			@Override
			public void onSubmit() {
				// clear any previous errors
				final ImportExportPage page = (ImportExportPage) getPage();
				page.clearFeedback();

				// Go back to the item-creation step if there were new items, else to selection.
				Component newPanel = null;
				if (assignmentsToCreate.size() > 0) {
					newPanel = new CreateGradeItemStep(GradeImportConfirmationStep.this.panelId, Model.of(importWizardModel));
				} else {
					newPanel = new GradeItemImportSelectionStep(GradeImportConfirmationStep.this.panelId, Model.of(importWizardModel));
				}
				newPanel.setOutputMarkupId(true);
				GradeImportConfirmationStep.this.replaceWith(newPanel);
			}
		};
		backButton.setDefaultFormProcessing(false);
		form.add(backButton);

		// cancel button
		final Button cancelButton = new Button("cancelbutton") {
			private static final long serialVersionUID = 1L;

			@Override
			public void onSubmit() {
				// clear any previous errors
				final ImportExportPage page = (ImportExportPage) getPage();
				page.clearFeedback();
				setResponsePage(ImportExportPage.class);
			}
		};
		cancelButton.setDefaultFormProcessing(false);
		form.add(cancelButton);

		// finish button
		form.add(new Button("finishbutton"));

		// render items to be updated
		final boolean hasItemsToUpdate = !itemsToUpdate.isEmpty();
		final WebMarkupContainer gradesUpdateContainer = new WebMarkupContainer("grades_update_container") {
			private static final long serialVersionUID = 1L;

			@Override
			public boolean isVisible() {
				return hasItemsToUpdate;
			}
		};
		add(gradesUpdateContainer);

		if (hasItemsToUpdate) {
			final ListView<ProcessedGradeItem> updateList = makeListView("grades_update", itemsToUpdate);
			updateList.setReuseItems(true);
			gradesUpdateContainer.add(updateList);
		}

		// render items to be created
		final boolean hasItemsToCreate = !itemsToCreate.isEmpty();
		final WebMarkupContainer gradesCreateContainer = new WebMarkupContainer("grades_create_container") {
			private static final long serialVersionUID = 1L;

			@Override
			public boolean isVisible() {
				return hasItemsToCreate;
			}
		};
		add(gradesCreateContainer);

		if (hasItemsToCreate) {
			final ListView<ProcessedGradeItem> createList = makeListView("grades_create", itemsToCreate);
			createList.setReuseItems(true);
			gradesCreateContainer.add(createList);
		}

		// render items to be modified
		final boolean hasItemsToModify = !itemsToModify.isEmpty();
		final WebMarkupContainer gradesModifyContainer = new WebMarkupContainer("grades_modify_container") {
			private static final long serialVersionUID = 1L;

			@Override
			public boolean isVisible() {
				return hasItemsToModify;
			}
		};
		add(gradesModifyContainer);

		if (hasItemsToModify) {
			final ListView<ProcessedGradeItem> modifyList = makeListView("grades_modify", itemsToModify);
			modifyList.setReuseItems(true);
			gradesModifyContainer.add(modifyList);
		}
	}

	/**
	 * Helper to create a listview for what needs to be shown.
	 *
	 * @param markupId wicket markup id
	 * @param itemList list of items to render
	 * @return the configured list view
	 */
	private ListView<ProcessedGradeItem> makeListView(final String markupId, final List<ProcessedGradeItem> itemList) {

		final ListView<ProcessedGradeItem> rval = new ListView<ProcessedGradeItem>(markupId, itemList) {
			private static final long serialVersionUID = 1L;

			@Override
			protected void populateItem(final ListItem<ProcessedGradeItem> item) {

				final ProcessedGradeItem gradeItem = item.getModelObject();

				// ensure we display the edited data if we have it (won't exist for an update)
				final String assignmentTitle = gradeItem.getAssignmentTitle();
				final Double assignmentPoints = gradeItem.getAssignmentPoints();

				item.add(new Label("itemTitle", (assignmentTitle != null) ? assignmentTitle : gradeItem.getItemTitle()));
				item.add(new Label("itemPointValue", (assignmentPoints != null) ? assignmentPoints : gradeItem.getItemPointValue()));

				// if comment and it's being updated, add additional row
				if (gradeItem.getType() == ProcessedGradeItem.Type.COMMENT
						&& gradeItem.getCommentStatus().getStatusCode() != ProcessedGradeItemStatus.STATUS_NA) {

					item.add(new Behavior() {
						private static final long serialVersionUID = 1L;

						@Override
						public void afterRender(final Component component) {
							super.afterRender(component);
							component.getResponse().write(
									"<tr class=\"comment\"><td class=\"item_title\" colspan=\"2\"><span>"
											+ getString("importExport.commentname") + "</span></td></tr>");
						}
					});
				}
			}
		};

		return rval;
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.managers.checkpoint; import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Set; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.compute.ComputeTaskSessionScope; import org.apache.ignite.events.CheckpointEvent; import org.apache.ignite.internal.GridKernalContext; import org.apache.ignite.internal.GridTaskSessionImpl; import org.apache.ignite.internal.GridTaskSessionInternal; import org.apache.ignite.internal.SkipDaemon; import org.apache.ignite.internal.managers.GridManagerAdapter; import org.apache.ignite.internal.managers.communication.GridIoManager; import org.apache.ignite.internal.managers.communication.GridIoPolicy; import org.apache.ignite.internal.managers.communication.GridMessageListener; import org.apache.ignite.internal.util.GridBoundedConcurrentLinkedHashSet; import org.apache.ignite.internal.util.GridConcurrentHashSet; import org.apache.ignite.internal.util.tostring.GridToStringInclude; import 
org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.marshaller.Marshaller;
import org.apache.ignite.spi.IgniteSpiException;
import org.apache.ignite.spi.checkpoint.CheckpointListener;
import org.apache.ignite.spi.checkpoint.CheckpointSpi;
import org.jetbrains.annotations.Nullable;

import static org.apache.ignite.events.EventType.EVT_CHECKPOINT_LOADED;
import static org.apache.ignite.events.EventType.EVT_CHECKPOINT_REMOVED;
import static org.apache.ignite.events.EventType.EVT_CHECKPOINT_SAVED;
import static org.apache.ignite.internal.GridTopic.TOPIC_CHECKPOINT;
import static org.jsr166.ConcurrentLinkedHashMap.QueuePolicy.PER_SEGMENT_Q;

/**
 * This class defines a checkpoint manager.
 */
@SkipDaemon
@SuppressWarnings({"SynchronizationOnLocalVariableOrMethodParameter", "deprecation"})
public class GridCheckpointManager extends GridManagerAdapter<CheckpointSpi> {
    /** Max closed topics to store. */
    public static final int MAX_CLOSED_SESS = 10240;

    /** Listener handling checkpoint requests arriving on {@link GridTopic#TOPIC_CHECKPOINT}. */
    private final GridMessageListener lsnr = new CheckpointRequestListener();

    /** Checkpoint keys grouped by session id; {@code null} when checkpoints are disabled. */
    private final ConcurrentMap<IgniteUuid, CheckpointSet> keyMap;

    /** Recently closed session ids, bounded to {@link #MAX_CLOSED_SESS} entries. */
    private final Collection<IgniteUuid> closedSess;

    /** Grid marshaller. */
    private final Marshaller marsh;

    /**
     * @param ctx Grid kernal context.
     */
    public GridCheckpointManager(GridKernalContext ctx) {
        super(ctx, ctx.config().getCheckpointSpi());

        marsh = ctx.config().getMarshaller();

        if (enabled()) {
            keyMap = new ConcurrentHashMap<>();

            closedSess = new GridBoundedConcurrentLinkedHashSet<>(MAX_CLOSED_SESS,
                MAX_CLOSED_SESS,
                0.75f,
                256,
                PER_SEGMENT_Q);
        }
        else {
            keyMap = null;

            closedSess = null;
        }
    }

    /** {@inheritDoc} */
    @Override public void start() throws IgniteCheckedException {
        for (CheckpointSpi spi : getSpis()) {
            spi.setCheckpointListener(new CheckpointListener() {
                @Override public void onCheckpointRemoved(String key) {
                    record(EVT_CHECKPOINT_REMOVED, key);
                }
            });
        }

        startSpi();

        ctx.io().addMessageListener(TOPIC_CHECKPOINT, lsnr);

        if (log.isDebugEnabled())
            log.debug(startInfo());
    }

    /** {@inheritDoc} */
    @Override public void stop(boolean cancel) throws IgniteCheckedException {
        if (ctx.config().isDaemon())
            return;

        GridIoManager comm = ctx.io();

        if (comm != null)
            comm.removeMessageListener(TOPIC_CHECKPOINT, lsnr);

        stopSpi();

        if (log.isDebugEnabled())
            log.debug(stopInfo());
    }

    /**
     * @return Session IDs.
     */
    public Collection<IgniteUuid> sessionIds() {
        return enabled() ?
            new ArrayList<>(keyMap.keySet()) :
            Collections.<IgniteUuid>emptyList();
    }

    /**
     * @param ses Task session.
     * @param key Checkpoint key.
     * @param state Checkpoint state to save.
     * @param scope Checkpoint scope.
     * @param timeout Checkpoint timeout.
     * @param override Whether or not override checkpoint if it already exists.
     * @return {@code true} if checkpoint has been actually saved, {@code false} otherwise.
     * @throws IgniteCheckedException Thrown in case of any errors.
     */
    public boolean storeCheckpoint(GridTaskSessionInternal ses,
        String key,
        Object state,
        ComputeTaskSessionScope scope,
        long timeout,
        boolean override)
        throws IgniteCheckedException {
        if (!enabled())
            return false;

        assert ses != null;
        assert key != null;

        long now = U.currentTimeMillis();

        boolean saved = false;

        try {
            switch (scope) {
                case GLOBAL_SCOPE: {
                    byte[] data = state == null ? null : U.marshal(marsh, state);

                    saved = getSpi(ses.getCheckpointSpi()).saveCheckpoint(key, data, timeout, override);

                    if (saved)
                        record(EVT_CHECKPOINT_SAVED, key);

                    break;
                }

                case SESSION_SCOPE: {
                    // Session-scoped checkpoints are refused once the session is
                    // invalidated or has timed out.
                    if (closedSess.contains(ses.getId())) {
                        U.warn(log, S.toString("Checkpoint will not be saved due to session invalidation",
                            "key", key, true,
                            "val", state, true,
                            "ses", ses, false),
                            "Checkpoint will not be saved due to session invalidation.");

                        break;
                    }

                    if (now > ses.getEndTime()) {
                        U.warn(log, S.toString("Checkpoint will not be saved due to session timeout",
                            "key", key, true,
                            "val", state, true,
                            "ses", ses, false),
                            "Checkpoint will not be saved due to session timeout.");

                        break;
                    }

                    // Clamp the checkpoint lifetime to the session end (the "< 0"
                    // branch guards against long overflow of now + timeout).
                    if (now + timeout > ses.getEndTime() || now + timeout < 0)
                        timeout = ses.getEndTime() - now;

                    // Save it first to avoid getting null value on another node.
                    byte[] data = state == null ? null : U.marshal(marsh, state);

                    Set<String> keys = keyMap.get(ses.getId());

                    if (keys == null) {
                        Set<String> old = keyMap.putIfAbsent(ses.getId(),
                            (CheckpointSet)(keys = new CheckpointSet(ses.session())));

                        if (old != null)
                            keys = old;

                        // Double check. The session may have been invalidated between the
                        // closedSess check above and the map insertion; undo if so.
                        if (closedSess.contains(ses.getId())) {
                            U.warn(log, S.toString("Checkpoint will not be saved due to session invalidation",
                                "key", key, true,
                                "val", state, true,
                                "ses", ses, false),
                                "Checkpoint will not be saved due to session invalidation.");

                            keyMap.remove(ses.getId(), keys);

                            break;
                        }
                    }

                    if (log.isDebugEnabled())
                        log.debug(S.toString("Resolved keys for session",
                            "keys", keys, true,
                            "ses", ses, false,
                            "keyMap", keyMap, false));

                    // Note: Check that keys exists because session may be invalidated during saving
                    // checkpoint from GridFuture.
                    if (keys != null) {
                        // Notify master node.
                        if (ses.getJobId() != null) {
                            ClusterNode node = ctx.discovery().node(ses.getTaskNodeId());

                            if (node != null)
                                ctx.io().sendToGridTopic(
                                    node,
                                    TOPIC_CHECKPOINT,
                                    new GridCheckpointRequest(ses.getId(), key, ses.getCheckpointSpi()),
                                    GridIoPolicy.PUBLIC_POOL);
                        }

                        saved = getSpi(ses.getCheckpointSpi()).saveCheckpoint(key, data, timeout, override);

                        if (saved) {
                            keys.add(key);

                            record(EVT_CHECKPOINT_SAVED, key);
                        }
                    }

                    break;
                }

                default:
                    assert false : "Unknown checkpoint scope: " + scope;
            }
        }
        catch (IgniteSpiException e) {
            throw new IgniteCheckedException(S.toString("Failed to save checkpoint",
                "key", key, true,
                "val", state, true,
                "scope", scope, false,
                "timeout", timeout, false),
                e);
        }

        return saved;
    }

    /**
     * Removes the key from every configured checkpoint SPI.
     *
     * @param key Checkpoint key.
     * @return Whether or not checkpoint was removed.
     */
    public boolean removeCheckpoint(String key) {
        if (!enabled())
            return false;

        assert key != null;

        boolean rmv = false;

        for (CheckpointSpi spi : getSpis())
            if (spi.removeCheckpoint(key))
                rmv = true;

        return rmv;
    }

    /**
     * @param ses Task session.
     * @param key Checkpoint key.
     * @return Whether or not checkpoint was removed.
     */
    public boolean removeCheckpoint(GridTaskSessionInternal ses, String key) {
        if (!enabled())
            return false;

        assert ses != null;
        assert key != null;

        Set<String> keys = keyMap.get(ses.getId());

        boolean rmv = false;

        // Note: Check that keys exists because session may be invalidated during removing
        // checkpoint from GridFuture.
        if (keys != null) {
            keys.remove(key);

            rmv = getSpi(ses.getCheckpointSpi()).removeCheckpoint(key);
        }
        else if (log.isDebugEnabled())
            log.debug(S.toString("Checkpoint will not be removed (key map not found)",
                "key", key, true,
                "ses", ses, false));

        return rmv;
    }

    /**
     * @param ses Task session.
     * @param key Checkpoint key.
     * @return Loaded checkpoint.
     * @throws IgniteCheckedException Thrown in case of any errors.
     */
    @Nullable public Serializable loadCheckpoint(GridTaskSessionInternal ses, String key) throws IgniteCheckedException {
        if (!enabled())
            return null;

        assert ses != null;
        assert key != null;

        try {
            byte[] data = getSpi(ses.getCheckpointSpi()).loadCheckpoint(key);

            Serializable state = null;

            // Always deserialize with task/session class loader.
            if (data != null)
                state = U.unmarshal(marsh, data, U.resolveClassLoader(ses.getClassLoader(), ctx.config()));

            record(EVT_CHECKPOINT_LOADED, key);

            return state;
        }
        catch (IgniteSpiException e) {
            throw new IgniteCheckedException(S.INCLUDE_SENSITIVE ?
                ("Failed to load checkpoint: " + key) : "Failed to load checkpoint", e);
        }
    }

    /**
     * @param ses Task session.
     * @param cleanup Whether cleanup or not.
     */
    public void onSessionEnd(GridTaskSessionInternal ses, boolean cleanup) {
        if (!enabled())
            return;

        closedSess.add(ses.getId());

        // If on task node.
        if (ses.getJobId() == null) {
            Set<String> keys = keyMap.remove(ses.getId());

            if (keys != null) {
                for (String key : keys)
                    getSpi(ses.getCheckpointSpi()).removeCheckpoint(key);
            }
        }
        // If on job node.
        else if (cleanup) {
            // Clean up memory.
            CheckpointSet keys = keyMap.get(ses.getId());

            // Make sure that we don't remove checkpoint set that
            // was created by newly created session.
            if (keys != null && keys.session() == ses.session())
                keyMap.remove(ses.getId(), keys);
        }
    }

    /**
     * Records a checkpoint event if the event type is recordable.
     *
     * @param type Event type.
     * @param key Checkpoint key.
     */
    private void record(int type, String key) {
        if (ctx.event().isRecordable(type)) {
            String msg;

            if (type == EVT_CHECKPOINT_SAVED)
                msg = "Checkpoint saved";
            else if (type == EVT_CHECKPOINT_LOADED)
                msg = "Checkpoint loaded";
            else {
                assert type == EVT_CHECKPOINT_REMOVED : "Invalid event type: " + type;

                msg = "Checkpoint removed";
            }

            // The key is only included in the message when sensitive data logging is on.
            if (S.INCLUDE_SENSITIVE)
                msg += ": " + key;

            ctx.event().record(new CheckpointEvent(ctx.discovery().localNode(), msg, type, key));
        }
    }

    /** {@inheritDoc} */
    @Override public void printMemoryStats() {
        X.println(">>>");
        X.println(">>> Checkpoint manager memory stats [igniteInstanceName=" + ctx.igniteInstanceName() + ']');
        X.println(">>>  keyMap: " + (keyMap != null ? keyMap.size() : 0));
    }

    /**
     * Checkpoint set.
     */
    private static class CheckpointSet extends GridConcurrentHashSet<String> {
        /** */
        private static final long serialVersionUID = 0L;

        /** Session. */
        @GridToStringInclude
        private final GridTaskSessionInternal ses;

        /**
         * @param ses Session.
         */
        private CheckpointSet(GridTaskSessionInternal ses) {
            this.ses = ses;
        }

        /**
         * @return Session.
         */
        GridTaskSessionInternal session() {
            return ses;
        }

        /** {@inheritDoc} */
        @Override public String toString() {
            return S.toString(CheckpointSet.class, this);
        }
    }

    /**
     * Listener that tracks checkpoint keys announced by job nodes so the task node
     * can clean them up when the session ends.
     */
    private class CheckpointRequestListener implements GridMessageListener {
        /**
         * @param nodeId ID of the node that sent this message.
         * @param msg Received message.
         * @param plc Message policy (I/O thread pool the message was processed in).
         */
        @SuppressWarnings({"MismatchedQueryAndUpdateOfCollection"})
        @Override public void onMessage(UUID nodeId, Object msg, byte plc) {
            GridCheckpointRequest req = (GridCheckpointRequest)msg;

            if (log.isDebugEnabled())
                log.debug("Received checkpoint request: " + req);

            if (!enabled())
                return;

            IgniteUuid sesId = req.getSessionId();

            // Session already closed: the checkpoint is stale, remove it right away.
            if (closedSess.contains(sesId)) {
                getSpi(req.getCheckpointSpi()).removeCheckpoint(req.getKey());

                return;
            }

            Set<String> keys = keyMap.get(sesId);

            if (keys == null) {
                GridTaskSessionImpl ses = ctx.session().getSession(sesId);

                if (ses == null) {
                    getSpi(req.getCheckpointSpi()).removeCheckpoint(req.getKey());

                    return;
                }

                Set<String> old = keyMap.putIfAbsent(sesId,
                    (CheckpointSet)(keys = new CheckpointSet(ses)));

                if (old != null)
                    keys = old;
            }

            keys.add(req.getKey());

            // Double check. The session may have closed while the key was being added;
            // undo the bookkeeping and drop the checkpoint if so.
            if (closedSess.contains(sesId)) {
                keyMap.remove(sesId, keys);

                getSpi(req.getCheckpointSpi()).removeCheckpoint(req.getKey());
            }
        }
    }
}
/* * Copyright (c) 2006-2013, KNOPFLERFISH project * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following * conditions are met: * * - Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * - Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials * provided with the distribution. * * - Neither the name of the KNOPFLERFISH project nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

/**
 * @author Erik Wistrand
 * @author Philippe Laporte
 */

package org.knopflerfish.bundle.metatype;

import java.net.URL;
import java.util.Dictionary;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Map;
import java.util.Properties;
import java.util.Vector;

import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceRegistration;
import org.osgi.service.cm.ManagedService;
import org.osgi.service.metatype.MetaTypeProvider;
import org.osgi.service.metatype.MetaTypeService;

import org.knopflerfish.service.log.LogRef;
import org.knopflerfish.util.metatype.KFLegacyMetaTypeParser;
import org.knopflerfish.util.metatype.MTP;
import org.knopflerfish.util.metatype.OCD;
import org.knopflerfish.util.metatype.SystemMetatypeProvider;

/**
 * Bundle activator: publishes the system metatype provider, a ManagedService
 * that loads external legacy metatype XML files, and a metatype provider for
 * Java system properties.
 */
public class Activator
  implements BundleActivator
{
  BundleContext bc;
  LogRef log;

  SystemMetatypeProvider sysMTP;
  SysPropMetatypeProvider spMTP;

  // Registrations for providers loaded from "external.metatype.urls",
  // mapped to the MTP instance each registration serves.
  Map<ServiceRegistration<MetaTypeProvider>, MTP> confMtpRegs =
    new HashMap<ServiceRegistration<MetaTypeProvider>, MTP>();

  public void start(BundleContext _bc)
  {
    this.bc = _bc;
    this.log = new LogRef(bc);

    // Register the system metatype provider under all three service interfaces.
    sysMTP = new SystemMetatypeProvider(bc, confMtpRegs);
    sysMTP.open();
    bc.registerService(new String[] { SystemMetatypeProvider.class.getName(),
                                     MetaTypeProvider.class.getName(),
                                     MetaTypeService.class.getName() }, sysMTP,
                       (Dictionary<String, ?>) null);

    // ManagedService driven by CM: each update (re)loads metatype definitions
    // from the URLs listed in the "external.metatype.urls" property.
    final ManagedService config = new ManagedService() {
      public void updated(Dictionary<String, ?> props)
      {
        synchronized (confMtpRegs) {
          Vector<String> urls = null;

          if (props != null) {
            @SuppressWarnings("unchecked")
            final Vector<String> value =
              (Vector<String>) props.get("external.metatype.urls");
            urls = value;
          }

          if (urls == null) {
            urls = new Vector<String>();
          }

          final MTP[] mtp = new MTP[urls.size()];
          try {
            // Parse every URL first so a bad URL aborts before any
            // existing registration is torn down.
            for (int i = 0; i < urls.size(); i++) {
              final URL url = new URL(urls.elementAt(i));
              mtp[i] = KFLegacyMetaTypeParser.loadMTPFromURL(bc.getBundle(), url);
            }

            // Replace all previously registered external providers.
            for (final ServiceRegistration<?> reg : confMtpRegs.keySet()) {
              reg.unregister();
            }
            confMtpRegs.clear();

            for (int i = 0; i < mtp.length; i++) {
              final Dictionary<String, Object> prop =
                new Hashtable<String, Object>();
              prop.put("source.url", urls.elementAt(i));
              String[] pids = mtp[i].getPids();
              if (pids != null) {
                prop.put("service.pids", pids);
              }
              pids = mtp[i].getFactoryPids();
              if (pids != null) {
                prop.put("factory.pids", pids);
              }
              final ServiceRegistration<MetaTypeProvider> reg =
                bc.registerService(MetaTypeProvider.class, mtp[i], prop);
              confMtpRegs.put(reg, mtp[i]);
            }
          } catch (final Exception e) {
            log.error("Failed to set values", e);
          }
        } // synchronized
      } // method
    };

    final Dictionary<String, String> props = new Hashtable<String, String>();
    props.put("service.pid",
              "org.knopflerfish.util.metatype.SystemMetatypeProvider");

    bc.registerService(ManagedService.class, config, props);

    setupSystemProps();
  }

  // Registers a ManagedService that copies CM configuration into Java system
  // properties, plus a metatype provider describing those properties.
  void setupSystemProps()
  {
    final ManagedService config = new ManagedService() {

      public void updated(Dictionary<String, ?> props)
      {
        if (props != null) {
          for (final Enumeration<String> e = props.keys(); e.hasMoreElements();) {
            final String key = e.nextElement();
            final Object val = props.get(key);
            if (val != null) {
              try {
                System.setProperty(key, val.toString());
              } catch (final Exception ex) {
                log.error("Failed to set system property '" + key + "' to "
                          + val, ex);
              }
            }
          }
        }
      }
    };

    final Dictionary<String, String> props = new Hashtable<String, String>();
    props.put("service.pid", SysPropMetatypeProvider.PID);

    bc.registerService(ManagedService.class, config, props);

    spMTP = new SysPropMetatypeProvider(bc);

    bc.registerService(new String[] { MetaTypeProvider.class.getName() },
                       spMTP, new Hashtable<String, Object>() {
                         private static final long serialVersionUID = 1L;
                         {
                           put("service.pids", spMTP.getPids());
                         }
                       });
  }

  public void stop(BundleContext bc)
  {
    // NOTE(review): registrations in confMtpRegs are not explicitly
    // unregistered here; the framework unregisters a stopping bundle's
    // services automatically.
    sysMTP.close();
    this.log.close();
    this.log = null;
    this.bc = null;
  }
}

/**
 * Metatype provider exposing (a filtered subset of) Java system properties as
 * a single object class definition under {@link #PID}.
 */
class SysPropMetatypeProvider
  extends MTP
{
  OCD spOCD;

  static final String PID = "java.system.properties";

  SysPropMetatypeProvider(BundleContext bc)
  {
    super("System properties");

    final Dictionary<String, Object> defProps = new Hashtable<String, Object>();

    final Properties sysProps = System.getProperties();

    for (final Enumeration<?> e = sysProps.keys(); e.hasMoreElements();) {
      final String key = (String) e.nextElement();
      // Use the local value for the current framework instance; props
      // that have not been exported with some value as system
      // properties will not be visible due to the limitation of
      // BundleContext.getProperty() on OSGi R4.
      final Object val = bc.getProperty(key);
      // Skip well-known platform/JVM property namespaces.
      if (key.startsWith("java.") || key.startsWith("os.")
          || key.startsWith("sun.") || key.startsWith("awt.")
          || key.startsWith("user.")) {
        continue;
      }
      if (null != val) {
        defProps.put(key, val);
      }
    }

    spOCD = new OCD(PID, PID, "Java system properties", defProps);

    addService(PID, spOCD);
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with this * work for additional information regarding copyright ownership. The ASF * licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.apache.hadoop.hbase.io.compress; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.FilterOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.compress.CodecPool; import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.compress.CompressionInputStream; import org.apache.hadoop.io.compress.CompressionOutputStream; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.io.compress.Decompressor; import org.apache.hadoop.io.compress.DefaultCodec; import org.apache.hadoop.io.compress.DoNotPool; import org.apache.hadoop.io.compress.GzipCodec; import org.apache.hadoop.util.ReflectionUtils; /** * Compression related stuff. * Copied from hadoop-3315 tfile. 
*/ @InterfaceAudience.Private public final class Compression { static final Log LOG = LogFactory.getLog(Compression.class); /** * Prevent the instantiation of class. */ private Compression() { super(); } static class FinishOnFlushCompressionStream extends FilterOutputStream { public FinishOnFlushCompressionStream(CompressionOutputStream cout) { super(cout); } @Override public void write(byte b[], int off, int len) throws IOException { out.write(b, off, len); } @Override public void flush() throws IOException { CompressionOutputStream cout = (CompressionOutputStream) out; cout.finish(); cout.flush(); cout.resetState(); } } /** * Returns the classloader to load the Codec class from. */ private static ClassLoader getClassLoaderForCodec() { ClassLoader cl = Thread.currentThread().getContextClassLoader(); if (cl == null) { cl = Compression.class.getClassLoader(); } if (cl == null) { cl = ClassLoader.getSystemClassLoader(); } if (cl == null) { throw new RuntimeException("A ClassLoader to load the Codec could not be determined"); } return cl; } /** * Compression algorithms. The ordinal of these cannot change or else you * risk breaking all existing HFiles out there. Even the ones that are * not compressed! (They use the NONE algorithm) */ @edu.umd.cs.findbugs.annotations.SuppressWarnings( value="SE_TRANSIENT_FIELD_NOT_RESTORED", justification="We are not serializing so doesn't apply (not sure why transient though)") @InterfaceAudience.Public @InterfaceStability.Evolving public static enum Algorithm { LZO("lzo") { // Use base type to avoid compile-time dependencies. 
private volatile transient CompressionCodec lzoCodec; private transient Object lock = new Object(); @Override CompressionCodec getCodec(Configuration conf) { if (lzoCodec == null) { synchronized (lock) { if (lzoCodec == null) { lzoCodec = buildCodec(conf); } } } return lzoCodec; } private CompressionCodec buildCodec(Configuration conf) { try { Class<?> externalCodec = ClassLoader.getSystemClassLoader() .loadClass("com.hadoop.compression.lzo.LzoCodec"); return (CompressionCodec) ReflectionUtils.newInstance(externalCodec, new Configuration(conf)); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } }, GZ("gz") { private volatile transient GzipCodec codec; private transient Object lock = new Object(); @Override DefaultCodec getCodec(Configuration conf) { if (codec == null) { synchronized (lock) { if (codec == null) { codec = buildCodec(conf); } } } return codec; } private GzipCodec buildCodec(Configuration conf) { GzipCodec gzcodec = new ReusableStreamGzipCodec(); gzcodec.setConf(new Configuration(conf)); return gzcodec; } }, NONE("none") { @Override DefaultCodec getCodec(Configuration conf) { return null; } @Override public synchronized InputStream createDecompressionStream( InputStream downStream, Decompressor decompressor, int downStreamBufferSize) throws IOException { if (downStreamBufferSize > 0) { return new BufferedInputStream(downStream, downStreamBufferSize); } // else { // Make sure we bypass FSInputChecker buffer. // return new BufferedInputStream(downStream, 1024); // } // } return downStream; } @Override public synchronized OutputStream createCompressionStream( OutputStream downStream, Compressor compressor, int downStreamBufferSize) throws IOException { if (downStreamBufferSize > 0) { return new BufferedOutputStream(downStream, downStreamBufferSize); } return downStream; } }, SNAPPY("snappy") { // Use base type to avoid compile-time dependencies. 
private volatile transient CompressionCodec snappyCodec; private transient Object lock = new Object(); @Override CompressionCodec getCodec(Configuration conf) { if (snappyCodec == null) { synchronized (lock) { if (snappyCodec == null) { snappyCodec = buildCodec(conf); } } } return snappyCodec; } private CompressionCodec buildCodec(Configuration conf) { try { Class<?> externalCodec = ClassLoader.getSystemClassLoader() .loadClass("org.apache.hadoop.io.compress.SnappyCodec"); return (CompressionCodec) ReflectionUtils.newInstance(externalCodec, conf); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } }, LZ4("lz4") { // Use base type to avoid compile-time dependencies. private volatile transient CompressionCodec lz4Codec; private transient Object lock = new Object(); @Override CompressionCodec getCodec(Configuration conf) { if (lz4Codec == null) { synchronized (lock) { if (lz4Codec == null) { lz4Codec = buildCodec(conf); } } } return lz4Codec; } private CompressionCodec buildCodec(Configuration conf) { try { Class<?> externalCodec = getClassLoaderForCodec().loadClass("org.apache.hadoop.io.compress.Lz4Codec"); return (CompressionCodec) ReflectionUtils.newInstance(externalCodec, conf); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } }; private final Configuration conf; private final String compressName; // data input buffer size to absorb small reads from application. private static final int DATA_IBUF_SIZE = 1 * 1024; // data output buffer size to absorb small writes from application. 
private static final int DATA_OBUF_SIZE = 4 * 1024; Algorithm(String name) { this.conf = new Configuration(); this.conf.setBoolean("hadoop.native.lib", true); this.compressName = name; } abstract CompressionCodec getCodec(Configuration conf); public InputStream createDecompressionStream( InputStream downStream, Decompressor decompressor, int downStreamBufferSize) throws IOException { CompressionCodec codec = getCodec(conf); // Set the internal buffer size to read from down stream. if (downStreamBufferSize > 0) { ((Configurable)codec).getConf().setInt("io.file.buffer.size", downStreamBufferSize); } CompressionInputStream cis = codec.createInputStream(downStream, decompressor); BufferedInputStream bis2 = new BufferedInputStream(cis, DATA_IBUF_SIZE); return bis2; } public OutputStream createCompressionStream( OutputStream downStream, Compressor compressor, int downStreamBufferSize) throws IOException { OutputStream bos1 = null; if (downStreamBufferSize > 0) { bos1 = new BufferedOutputStream(downStream, downStreamBufferSize); } else { bos1 = downStream; } CompressionOutputStream cos = createPlainCompressionStream(bos1, compressor); BufferedOutputStream bos2 = new BufferedOutputStream(new FinishOnFlushCompressionStream(cos), DATA_OBUF_SIZE); return bos2; } /** * Creates a compression stream without any additional wrapping into * buffering streams. */ public CompressionOutputStream createPlainCompressionStream( OutputStream downStream, Compressor compressor) throws IOException { CompressionCodec codec = getCodec(conf); ((Configurable)codec).getConf().setInt("io.file.buffer.size", 32 * 1024); return codec.createOutputStream(downStream, compressor); } public Compressor getCompressor() { CompressionCodec codec = getCodec(conf); if (codec != null) { Compressor compressor = CodecPool.getCompressor(codec); if (compressor != null) { if (compressor.finished()) { // Somebody returns the compressor to CodecPool but is still using // it. 
LOG .warn("Compressor obtained from CodecPool is already finished()"); // throw new AssertionError( // "Compressor obtained from CodecPool is already finished()"); } compressor.reset(); } return compressor; } return null; } public void returnCompressor(Compressor compressor) { if (compressor != null) { CodecPool.returnCompressor(compressor); } } public Decompressor getDecompressor() { CompressionCodec codec = getCodec(conf); if (codec != null) { Decompressor decompressor = CodecPool.getDecompressor(codec); if (decompressor != null) { if (decompressor.finished()) { // Somebody returns the decompressor to CodecPool but is still using // it. LOG .warn("Deompressor obtained from CodecPool is already finished()"); // throw new AssertionError( // "Decompressor obtained from CodecPool is already finished()"); } decompressor.reset(); } return decompressor; } return null; } public void returnDecompressor(Decompressor decompressor) { if (decompressor != null) { CodecPool.returnDecompressor(decompressor); if (decompressor.getClass().isAnnotationPresent(DoNotPool.class)) { decompressor.end(); } } } public String getName() { return compressName; } } public static Algorithm getCompressionAlgorithmByName(String compressName) { Algorithm[] algos = Algorithm.class.getEnumConstants(); for (Algorithm a : algos) { if (a.getName().equals(compressName)) { return a; } } throw new IllegalArgumentException( "Unsupported compression algorithm name: " + compressName); } /** * Get names of supported compression algorithms. * * @return Array of strings, each represents a supported compression * algorithm. Currently, the following compression algorithms are supported. */ public static String[] getSupportedAlgorithms() { Algorithm[] algos = Algorithm.class.getEnumConstants(); String[] ret = new String[algos.length]; int i = 0; for (Algorithm a : algos) { ret[i++] = a.getName(); } return ret; } /** * Decompresses data from the given stream using the configured compression * algorithm. 
It will throw an exception if the dest buffer does not have * enough space to hold the decompressed data. * * @param dest * the output bytes buffer * @param destOffset * start writing position of the output buffer * @param bufferedBoundedStream * a stream to read compressed data from, bounded to the exact amount * of compressed data * @param compressedSize * compressed data size, header not included * @param uncompressedSize * uncompressed data size, header not included * @param compressAlgo * compression algorithm used * @throws IOException */ public static void decompress(byte[] dest, int destOffset, InputStream bufferedBoundedStream, int compressedSize, int uncompressedSize, Compression.Algorithm compressAlgo) throws IOException { if (dest.length - destOffset < uncompressedSize) { throw new IllegalArgumentException( "Output buffer does not have enough space to hold " + uncompressedSize + " decompressed bytes, available: " + (dest.length - destOffset)); } Decompressor decompressor = null; try { decompressor = compressAlgo.getDecompressor(); InputStream is = compressAlgo.createDecompressionStream( bufferedBoundedStream, decompressor, 0); IOUtils.readFully(is, dest, destOffset, uncompressedSize); is.close(); } finally { if (decompressor != null) { compressAlgo.returnDecompressor(decompressor); } } } }
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.ec2.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceResult; /** * */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class DescribeDhcpOptionsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable { /** * <p> * Information about one or more DHCP options sets. * </p> */ private com.amazonaws.internal.SdkInternalList<DhcpOptions> dhcpOptions; /** * <p> * The token to use to retrieve the next page of results. This value is <code>null</code> when there are no more * results to return. * </p> */ private String nextToken; /** * <p> * Information about one or more DHCP options sets. * </p> * * @return Information about one or more DHCP options sets. */ public java.util.List<DhcpOptions> getDhcpOptions() { if (dhcpOptions == null) { dhcpOptions = new com.amazonaws.internal.SdkInternalList<DhcpOptions>(); } return dhcpOptions; } /** * <p> * Information about one or more DHCP options sets. * </p> * * @param dhcpOptions * Information about one or more DHCP options sets. 
*/ public void setDhcpOptions(java.util.Collection<DhcpOptions> dhcpOptions) { if (dhcpOptions == null) { this.dhcpOptions = null; return; } this.dhcpOptions = new com.amazonaws.internal.SdkInternalList<DhcpOptions>(dhcpOptions); } /** * <p> * Information about one or more DHCP options sets. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setDhcpOptions(java.util.Collection)} or {@link #withDhcpOptions(java.util.Collection)} if you want to * override the existing values. * </p> * * @param dhcpOptions * Information about one or more DHCP options sets. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeDhcpOptionsResult withDhcpOptions(DhcpOptions... dhcpOptions) { if (this.dhcpOptions == null) { setDhcpOptions(new com.amazonaws.internal.SdkInternalList<DhcpOptions>(dhcpOptions.length)); } for (DhcpOptions ele : dhcpOptions) { this.dhcpOptions.add(ele); } return this; } /** * <p> * Information about one or more DHCP options sets. * </p> * * @param dhcpOptions * Information about one or more DHCP options sets. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeDhcpOptionsResult withDhcpOptions(java.util.Collection<DhcpOptions> dhcpOptions) { setDhcpOptions(dhcpOptions); return this; } /** * <p> * The token to use to retrieve the next page of results. This value is <code>null</code> when there are no more * results to return. * </p> * * @param nextToken * The token to use to retrieve the next page of results. This value is <code>null</code> when there are no * more results to return. */ public void setNextToken(String nextToken) { this.nextToken = nextToken; } /** * <p> * The token to use to retrieve the next page of results. This value is <code>null</code> when there are no more * results to return. * </p> * * @return The token to use to retrieve the next page of results. 
This value is <code>null</code> when there are no * more results to return. */ public String getNextToken() { return this.nextToken; } /** * <p> * The token to use to retrieve the next page of results. This value is <code>null</code> when there are no more * results to return. * </p> * * @param nextToken * The token to use to retrieve the next page of results. This value is <code>null</code> when there are no * more results to return. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeDhcpOptionsResult withNextToken(String nextToken) { setNextToken(nextToken); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getDhcpOptions() != null) sb.append("DhcpOptions: ").append(getDhcpOptions()).append(","); if (getNextToken() != null) sb.append("NextToken: ").append(getNextToken()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof DescribeDhcpOptionsResult == false) return false; DescribeDhcpOptionsResult other = (DescribeDhcpOptionsResult) obj; if (other.getDhcpOptions() == null ^ this.getDhcpOptions() == null) return false; if (other.getDhcpOptions() != null && other.getDhcpOptions().equals(this.getDhcpOptions()) == false) return false; if (other.getNextToken() == null ^ this.getNextToken() == null) return false; if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getDhcpOptions() == null) ? 
0 : getDhcpOptions().hashCode()); hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode()); return hashCode; } @Override public DescribeDhcpOptionsResult clone() { try { return (DescribeDhcpOptionsResult) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
/* Copyright (c) 2015 Azzam Enajjar This project is licensed under the terms of the MIT license. Please see LICENSE.md for full license terms. */ package edu.pdx.oss.asthmacontrol; import android.app.DatePickerDialog; import android.content.Context; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; import android.view.View; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.DatePicker; import android.widget.EditText; import android.widget.GridView; import android.widget.ImageButton; import android.widget.Toast; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.List; public class UpdateAsthmaTimeActivity extends AppCompatActivity { EditText DATE_TEXT; ImageButton IMAGE_BUTTON; DatePicker DATE_PICKER; Button ADD_DATE_BUTTON, DELETE_BUTTON, BACK_BUTTON, DELETE_ALL_BUTTON, SHOW_ALL_BUTTON, SHOW_FOUR_WEEKS_BUTTON; GridView GRIDVIEW; Calendar calendar = Calendar.getInstance(); Context ctx = this; List<String> li; ArrayAdapter<String> dataAdapter; Integer pos; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_update_asthma_time); DATE_TEXT = (EditText) findViewById(R.id.dateText); IMAGE_BUTTON = (ImageButton) findViewById(R.id.imageButton); DATE_PICKER = (DatePicker) findViewById(R.id.datePicker); ADD_DATE_BUTTON = (Button) findViewById(R.id.addDateButton); DELETE_BUTTON = (Button) findViewById(R.id.deleteDateButton); SHOW_ALL_BUTTON =(Button) findViewById(R.id.showAllButton); SHOW_FOUR_WEEKS_BUTTON = (Button) findViewById(R.id.showFourWeeksButton); BACK_BUTTON = (Button) findViewById(R.id.backButton); GRIDVIEW = (GridView) findViewById(R.id.gridView); DELETE_ALL_BUTTON = (Button) 
findViewById(R.id.deleteAllButton); DATE_PICKER.setVisibility(View.INVISIBLE); SimpleDateFormat sdf = new SimpleDateFormat( "yyyy-MM-dd" ); DATE_TEXT.setText(sdf.format(new Date())); li = new ArrayList<String>(); dataAdapter = new ArrayAdapter<String>(getApplicationContext(), android.R.layout.simple_spinner_item, li); dataAdapter.setDropDownViewResource(R.layout.activity_update_asthma_time); displayDataOnGridView(); IMAGE_BUTTON.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { new DatePickerDialog(UpdateAsthmaTimeActivity.this, listener, calendar.get(Calendar.YEAR), calendar.get(Calendar.MONTH), calendar.get(Calendar.DAY_OF_MONTH)).show(); } }); ADD_DATE_BUTTON.setOnClickListener(new View.OnClickListener() { String logDate; Date dateObject; @Override public void onClick(View v) { DateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); try { String log_Date = DATE_TEXT.getText().toString(); dateObject = formatter.parse(log_Date); logDate = new SimpleDateFormat("yyyy-MM-dd").format(dateObject); } catch (java.text.ParseException e) { e.printStackTrace(); Toast.makeText(getBaseContext(), "Error... Please enter a valid date", Toast.LENGTH_SHORT).show(); } DatabaseOperations dop1 = new DatabaseOperations(ctx); Cursor CR = dop1.getAllDatesFromAsthmaTimeTable(dop1); if(CR.getCount()>0){ CR.moveToFirst(); boolean dateFound = false; do{ if(logDate.equals(CR.getString(0))){ dateFound = true; } }while(CR.moveToNext()); if (dateFound) { Toast.makeText(getBaseContext(), "Error... 
The date already exists", Toast.LENGTH_SHORT).show(); DATE_TEXT.requestFocus(); return; } } DatabaseOperations dop2 = new DatabaseOperations(ctx); dop2.insertDateForAsthmaTime(dop2, logDate); li.add(logDate); GRIDVIEW.setAdapter(dataAdapter); Toast.makeText(getBaseContext(), "The date added successfully", Toast.LENGTH_SHORT).show(); } }); GRIDVIEW.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { pos = position; } }); DELETE_BUTTON.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { if ((pos != null) && (pos >=0)) { DatabaseOperations dop = new DatabaseOperations(ctx); String logDate = li.get(pos); dop.deleteDateFromAsthmaTime(dop, logDate); li.remove(pos); displayDataOnGridView(); pos = pos - 1; Toast.makeText(getBaseContext(), "Selected date has been removed successfully..", Toast.LENGTH_SHORT).show(); } else Toast.makeText(getBaseContext(), "Error... 
Select a date first", Toast.LENGTH_SHORT).show(); } }); SHOW_ALL_BUTTON.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { displayDataOnGridView(); } }); SHOW_FOUR_WEEKS_BUTTON.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { DatabaseOperations dop = new DatabaseOperations(ctx); try { Cursor CR = dop.getPastFourWeeksFromAsthmaTime(dop); li.clear(); if(CR.getCount()>0){ if (CR.moveToFirst()){ do { String logDate = CR.getString(CR.getColumnIndex(TableData.TableInfo.ASTHMA_TIME_DATE)); li.add(logDate); GRIDVIEW.setAdapter(dataAdapter); } while(CR.moveToNext()); } else{ Toast.makeText(getBaseContext(), "There is no data...", Toast.LENGTH_SHORT).show(); } } else{ Toast.makeText(getBaseContext(), "There is no data...", Toast.LENGTH_SHORT).show(); } CR.close(); }catch (Exception e){ Toast.makeText(getBaseContext(),"Error : " + e.getMessage(), Toast.LENGTH_SHORT).show(); } } }); BACK_BUTTON.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { finish(); } }); DELETE_ALL_BUTTON.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { DatabaseOperations dop = new DatabaseOperations(ctx); dop.deleteAllFromAsthmaTime(dop); } }); } // End of create activity procedure DatePickerDialog.OnDateSetListener listener = new DatePickerDialog.OnDateSetListener() { @Override public void onDateSet(DatePicker view, int year, int monthOfYear, int dayOfMonth) { DATE_TEXT.setText(year + "-" + (monthOfYear + 1) + "-" + dayOfMonth); } }; public void displayDataOnGridView(){ try { SQLiteDatabase SQ = openOrCreateDatabase(TableData.TableInfo.DATABASE_NAME, Context.MODE_PRIVATE, null); Cursor CR = SQ.rawQuery("SELECT * FROM " + TableData.TableInfo.ASTHMA_TIME_TABLE + " ORDER BY " + TableData.TableInfo.ASTHMA_TIME_DATE, null); li.clear(); if(CR.getCount()>0){ if (CR.moveToFirst()){ do { String logDate = 
CR.getString(CR.getColumnIndex(TableData.TableInfo.ASTHMA_TIME_DATE)); li.add(logDate); GRIDVIEW.setAdapter(dataAdapter); } while(CR.moveToNext()); } else{ Toast.makeText(getBaseContext(), "There is no data...", Toast.LENGTH_SHORT).show(); } } CR.close(); SQ.close(); }catch (Exception e){ Toast.makeText(getBaseContext(),"Error : " + e.getMessage(), Toast.LENGTH_SHORT).show(); } } }
/*
 * Copyright 2016 Sai Pullabhotla.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jmethods.catatumbo.impl;

import java.lang.reflect.Field;

import com.jmethods.catatumbo.CreatedTimestamp;
import com.jmethods.catatumbo.EntityManagerException;
import com.jmethods.catatumbo.Indexer;
import com.jmethods.catatumbo.IndexerFactory;
import com.jmethods.catatumbo.Mapper;
import com.jmethods.catatumbo.MapperFactory;
import com.jmethods.catatumbo.NoSuitableMapperException;
import com.jmethods.catatumbo.Property;
import com.jmethods.catatumbo.SecondaryIndex;
import com.jmethods.catatumbo.UpdatedTimestamp;
import com.jmethods.catatumbo.Utility;
import com.jmethods.catatumbo.Version;

/**
 * Metadata for a single entity property: its Datastore name, index settings,
 * optionality, secondary indexer (if any), and the {@link Mapper} used to
 * convert the field's value to and from the Datastore representation.
 *
 * @author Sai Pullabhotla
 */
public class PropertyMetadata extends FieldMetadata {

  /**
   * Prefix prepended to the mapped name when a {@link SecondaryIndex}
   * annotation does not specify an explicit index name.
   */
  private static final String DEFAULT_SECONDARY_INDEX_PREFIX = "$";

  /** Datastore property name this field maps to. */
  private String mappedName;

  /** Whether this property is indexed in the Datastore. */
  private boolean indexed;

  /** Whether this property may be omitted when saving the entity. */
  protected boolean optional;

  /** Secondary indexer for this property; null when none is declared. */
  private Indexer secondaryIndexer;

  /** Name of the secondary index; null when none is declared. */
  private String secondaryIndexName;

  /** Mapper that converts this field's value to/from the Datastore form. */
  protected final Mapper mapper;

  /**
   * Creates metadata for the given field, reading name/indexed/optional
   * settings from its {@link Property} annotation when present.
   *
   * @param field
   *          the field
   */
  public PropertyMetadata(Field field) {
    super(field);
    // Defaults apply when the field carries no @Property annotation.
    String resolvedName = field.getName();
    boolean resolvedIndexed = true;
    boolean resolvedOptional = false;
    Property propertyAnnotation = field.getAnnotation(Property.class);
    if (propertyAnnotation != null) {
      String declaredName = propertyAnnotation.name();
      if (!Utility.isNullOrEmpty(declaredName)) {
        resolvedName = declaredName;
      }
      resolvedIndexed = propertyAnnotation.indexed();
      resolvedOptional = propertyAnnotation.optional();
    }
    this.mappedName = resolvedName;
    this.indexed = resolvedIndexed;
    setOptional(resolvedOptional);
    initializeSecondaryIndexer();
    this.mapper = initializeMapper();
  }

  /**
   * Creates metadata for the given field with explicitly supplied settings.
   *
   * @param field
   *          the field
   * @param mappedName
   *          name of the property in the Datastore
   * @param indexed
   *          whether or not the property is indexed
   * @param optional
   *          whether or not the property is optional
   */
  public PropertyMetadata(Field field, String mappedName, boolean indexed, boolean optional) {
    super(field);
    this.mappedName = mappedName;
    this.indexed = indexed;
    setOptional(optional);
    initializeSecondaryIndexer();
    this.mapper = initializeMapper();
  }

  /**
   * Returns the Datastore property name this field maps to.
   *
   * @return the mapped name.
   */
  public String getMappedName() {
    return mappedName;
  }

  /**
   * Sets the Datastore property name this field maps to.
   *
   * @param mappedName
   *          the mapped name.
   */
  public void setMappedName(String mappedName) {
    this.mappedName = mappedName;
  }

  /**
   * Reports whether this property is indexed.
   *
   * @return true, if indexed; false, otherwise.
   */
  public boolean isIndexed() {
    return indexed;
  }

  /**
   * Sets whether this property is indexed.
   *
   * @param indexed
   *          whether or not the property is indexed.
   */
  public void setIndexed(boolean indexed) {
    this.indexed = indexed;
  }

  /**
   * Returns the secondary indexer declared for this property, if any.
   *
   * @return the secondary indexer; may be <code>null</code>.
   */
  public Indexer getSecondaryIndexer() {
    return secondaryIndexer;
  }

  /**
   * Returns the secondary index name, if any.
   *
   * @return the secondary index name; may be <code>null</code>.
   */
  public String getSecondaryIndexName() {
    return secondaryIndexName;
  }

  /**
   * Reports whether the field represented by this metadata is optional.
   *
   * @return <code>true</code> if optional; <code>false</code> otherwise.
   */
  public boolean isOptional() {
    return optional;
  }

  /**
   * Sets whether the field represented by this metadata is optional.
   * Primitive fields and fields annotated with {@link Version},
   * {@link CreatedTimestamp} or {@link UpdatedTimestamp} are always treated
   * as mandatory, regardless of the requested value.
   *
   * @param optional
   *          whether or not the field represented by this metadata is optional.
   */
  public void setOptional(boolean optional) {
    boolean alwaysMandatory = field.getType().isPrimitive()
        || field.isAnnotationPresent(Version.class)
        || field.isAnnotationPresent(CreatedTimestamp.class)
        || field.isAnnotationPresent(UpdatedTimestamp.class);
    this.optional = !alwaysMandatory && optional;
  }

  /**
   * Resolves the secondary index name and indexer from the field's
   * {@link SecondaryIndex} annotation, if present.
   */
  private void initializeSecondaryIndexer() {
    SecondaryIndex secondaryIndexAnnotation = field.getAnnotation(SecondaryIndex.class);
    if (secondaryIndexAnnotation == null) {
      // No secondary index declared; nothing to set up.
      return;
    }
    String declaredIndexName = secondaryIndexAnnotation.name();
    boolean blankName = declaredIndexName == null || declaredIndexName.trim().length() == 0;
    this.secondaryIndexName =
        blankName ? DEFAULT_SECONDARY_INDEX_PREFIX + mappedName : declaredIndexName;
    try {
      secondaryIndexer = IndexerFactory.getInstance().getIndexer(field);
    } catch (Exception exp) {
      String pattern = "No suitable Indexer found or error occurred while creating the indexer "
          + "for field %s in class %s";
      String message = String.format(pattern, field.getName(),
          field.getDeclaringClass().getName());
      throw new EntityManagerException(message, exp);
    }
  }

  /**
   * Returns the {@link Mapper} associated with the field to which this metadata belongs.
   *
   * @return the {@link Mapper} associated with the field to which this metadata belongs.
   */
  public Mapper getMapper() {
    return mapper;
  }

  /**
   * Looks up the {@link Mapper} for this field from the {@link MapperFactory},
   * wrapping any failure with a message identifying the field.
   *
   * @return the {@link Mapper} for the field represented by this metadata
   */
  private Mapper initializeMapper() {
    try {
      return MapperFactory.getInstance().getMapper(field);
    } catch (NoSuitableMapperException exp) {
      String message = String.format(
          "No suitable mapper found or error occurred creating a mapper for field %s in class %s",
          field.getName(), field.getDeclaringClass().getName());
      throw new NoSuitableMapperException(message, exp);
    }
  }

}
package controleur.Commande; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.KeyAdapter; import java.awt.event.KeyEvent; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.io.IOException; import java.text.DecimalFormat; import java.util.ArrayList; import javax.swing.JFrame; import javax.swing.JOptionPane; import modele.Assiette; import modele.Commande; import modele.Employe; import vue.commande.PaiementIndiv; import vue.commande.PayerCommande; /** * @author Cesar HERNANDEZ ANTONIO, Joaquin GALVAN ANGELES * @version 1.0 * Description : Controleur pour paiement individuel */ public class ControleurPaiementInd implements ActionListener { private PaiementIndiv paiInd; private Commande com, tmp; private Assiette assi; private Employe emplo; private int table, noOrd, noCom, noEmpl, comAct=1; private String date; float suma, total, monn, cam, totgen; DecimalFormat dec = new DecimalFormat("#.##"); ArrayList<Object[]> tabl = new ArrayList<Object[]>(); public ControleurPaiementInd(PaiementIndiv pi, Commande co) { paiInd = pi; com = co; paiInd.addWindowListener(new WindowAdapter() { @Override public void windowClosing(WindowEvent arg0) { paiInd.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE); PayerCommande pay = new PayerCommande(); ControleurPayerComm cp = new ControleurPayerComm(pay, com); cp.recibePay(getAssiePer(), getEmploPer()); com.initialize(getAssiePer(), getEmploPer()); cp.startPayerComm(); } }); paiInd.txtMonn.addKeyListener(new KeyAdapter() { @Override public void keyTyped(KeyEvent e) { char car = e.getKeyChar(); if ((car < '0' || car > '9')) e.consume(); } }); paiInd.btnAnnuler.addActionListener(this); paiInd.btnMonnaie.addActionListener(this); paiInd.btnSuivante.addActionListener(this); } /****************/ public void recibePer(Assiette a, Employe e){ assi = a; // System.out.println("###### PAYPP :: "+assi); emplo = e; // System.out.println("$$$$$$ PAYPP :: "+emplo); } public 
Employe getEmploPer(){ // System.out.println("PAYPP :: "+assi); return emplo; } public Assiette getAssiePer(){ // System.out.println("PAYPP :: "+assi); return assi; } /******************/ public void initData(int table){ this.table=table; tabl.clear(); tmp = com.payTable(this.table); // System.out.println("Recibimos la orden :: "+tmp); noOrd = tmp.getNo_orden(); noCom = tmp.getNo_comensal(); noEmpl = tmp.getNo_empleado(); date = tmp.getFecha(); paiInd.lblEmplo.setText(Integer.toString(noEmpl)); paiInd.txtNoComm.setText(Integer.toString(noOrd)); paiInd.txtNoPers.setText(Integer.toString(comAct)); tabl = com.llenapayInd(tmp.getNo_orden(), this.table, 1); for (int i = 0; i < tabl.size(); i++) { paiInd.dtm.addRow(tabl.get(i)); } int filas=paiInd.tabAssi.getRowCount(); for (int i=0; filas>i; i++) { suma+=Float.parseFloat(paiInd.tabAssi.getValueAt(i, 2).toString()); } paiInd.txtTotal.setText(dec.format(suma)); } public void startPaiementInd(){ paiInd.setVisible(true); paiInd.setLocationRelativeTo(null); } @Override public void actionPerformed(ActionEvent actE) { if(actE.getSource().equals(paiInd.btnAnnuler)){ PayerCommande pay = new PayerCommande(); ControleurPayerComm cp = new ControleurPayerComm(pay, com); cp.recibePay(getAssiePer(), getEmploPer()); com.initialize(getAssiePer(), getEmploPer()); cp.startPayerComm(); paiInd.setVisible(false); }else if(actE.getSource().equals(paiInd.btnMonnaie)){ if(paiInd.txtMonto.getText().equals("") || paiInd.txtMonto.getText().equals(null)){ JOptionPane.showMessageDialog(null, "Champ monnaie vide!"); paiInd.txtMonto.requestFocus(); }else { total = Float.parseFloat(paiInd.txtTotal.getText()); monn = Float.parseFloat(paiInd.txtMonto.getText()); if(monn>=total){ paiInd.btnSuivante.setEnabled(true); cam = monn-total; paiInd.txtMonn.setText(dec.format(cam)); }else{ JOptionPane.showMessageDialog(null, "Pas assez de fonds,\nManque : "+dec.format(total-monn)+" \u20AC"); paiInd.btnSuivante.setEnabled(false); } } }else 
if(actE.getSource().equals(paiInd.btnSuivante)){ if(paiInd.txtMonto.getText().equals("") || paiInd.txtMonto.getText().equals(null)){ JOptionPane.showMessageDialog(null, "Champ monnaie vide!"); paiInd.txtMonto.requestFocus(); }else { total = Float.parseFloat(paiInd.txtTotal.getText()); monn = Float.parseFloat(paiInd.txtMonto.getText()); if(monn>=total){ totgen+=total; paiInd.btnSuivante.setEnabled(true); cam = monn-total; paiInd.txtMonn.setText(dec.format(cam)); int nom = Integer.parseInt(paiInd.txtNoPers.getText()); JOptionPane.showMessageDialog(null, "Per act "+nom); if(noCom>nom){ suma = 0; tabl.clear(); comAct = Integer.parseInt(paiInd.txtNoPers.getText()); paiInd.txtNoPers.setText(Integer.toString(comAct+1)); cleanTable(); comAct = Integer.parseInt(paiInd.txtNoPers.getText()); tabl = com.llenapayInd(tmp.getNo_orden(), this.table, comAct ); for (int i = 0; i < tabl.size(); i++) { paiInd.dtm.addRow(tabl.get(i)); } int filas=paiInd.tabAssi.getRowCount(); for (int i=0; filas>i; i++) { suma+=Float.parseFloat(paiInd.tabAssi.getValueAt(i, 2).toString()); } paiInd.txtTotal.setText(dec.format(suma)); }else{ JOptionPane.showMessageDialog(null, "Commande pay\u00E9e correctement! "); com.ajouterCommande(new Commande(noOrd, table, noCom, noEmpl, "Prepare", date, totgen)); try { com.saveState(); } catch (Exception e) { System.err.println("Error en saveState() debido a: "+e.getMessage()); } paiInd.btnSuivante.setEnabled(false); } }else{ JOptionPane.showMessageDialog(null, "Pas assez de fonds,\nManque : "+dec.format(total-monn)+" \u20AC"); paiInd.btnSuivante.setEnabled(false); } } } } public void cleanTable(){ tabl.clear(); paiInd.txtMonto.setText(""); paiInd.txtTotal.setText(""); paiInd.txtMonn.setText(""); int filas=paiInd.tabAssi.getRowCount(); for (int i=0; filas>i; i++) { paiInd.dtm.removeRow(0); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.core.persistence.impl.journal; import java.io.File; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import org.apache.activemq.artemis.api.core.ActiveMQBuffer; import org.apache.activemq.artemis.api.core.ActiveMQException; import org.apache.activemq.artemis.api.core.ActiveMQIllegalStateException; import org.apache.activemq.artemis.api.core.ActiveMQInternalErrorException; import org.apache.activemq.artemis.api.core.Message; import org.apache.activemq.artemis.api.core.Pair; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.core.config.Configuration; import org.apache.activemq.artemis.core.io.IOCriticalErrorListener; import org.apache.activemq.artemis.core.io.SequentialFile; import org.apache.activemq.artemis.core.io.SequentialFileFactory; import 
org.apache.activemq.artemis.core.io.aio.AIOSequentialFileFactory;
import org.apache.activemq.artemis.core.io.nio.NIOSequentialFileFactory;
import org.apache.activemq.artemis.core.journal.Journal;
import org.apache.activemq.artemis.core.journal.impl.JournalFile;
import org.apache.activemq.artemis.core.journal.impl.JournalImpl;
import org.apache.activemq.artemis.core.message.impl.MessageInternal;
import org.apache.activemq.artemis.core.paging.PagedMessage;
import org.apache.activemq.artemis.core.paging.PagingManager;
import org.apache.activemq.artemis.core.paging.PagingStore;
import org.apache.activemq.artemis.core.persistence.OperationContext;
import org.apache.activemq.artemis.core.persistence.impl.journal.codec.LargeMessageEncoding;
import org.apache.activemq.artemis.core.persistence.impl.journal.codec.PendingLargeMessageEncoding;
import org.apache.activemq.artemis.core.protocol.core.impl.wireformat.ReplicationLiveIsStoppingMessage;
import org.apache.activemq.artemis.core.replication.ReplicatedJournal;
import org.apache.activemq.artemis.core.replication.ReplicationManager;
import org.apache.activemq.artemis.core.server.ActiveMQMessageBundle;
import org.apache.activemq.artemis.core.server.ActiveMQServerLogger;
import org.apache.activemq.artemis.core.server.JournalType;
import org.apache.activemq.artemis.core.server.LargeServerMessage;
import org.apache.activemq.artemis.core.server.ServerMessage;
import org.apache.activemq.artemis.core.server.files.FileStoreMonitor;
import org.apache.activemq.artemis.utils.ExecutorFactory;
import org.jboss.logging.Logger;

/**
 * File-journal implementation of the storage manager: persists bindings and message
 * data through {@link JournalImpl} instances (NIO or AIO), stores large messages as
 * individual files, and can wrap both journals in {@link ReplicatedJournal}s when
 * replication to a backup is active.
 * <p>
 * Locking notes (from the code below): {@code storageManagerLock.writeLock()} guards
 * installing/removing the replicator; {@code readLock()/readUnLock()} guard individual
 * operations that must not race with replication start/stop.
 */
public class JournalStorageManager extends AbstractJournalStorageManager {

   private static final Logger logger = Logger.getLogger(JournalStorageManager.class);

   // Factory for message-journal files; NIO or AIO depending on configuration.
   private SequentialFileFactory journalFF;

   // Factory for bindings-journal files; always NIO.
   private SequentialFileFactory bindingsFF;

   // Factory for large-message files (package-visible; always NIO, unbuffered).
   SequentialFileFactory largeMessagesFactory;

   // The unwrapped journals; kept so replication can be torn down by restoring them
   // over the ReplicatedJournal wrappers assigned to messageJournal/bindingsJournal.
   private Journal originalMessageJournal;

   private Journal originalBindingsJournal;

   protected String largeMessagesDirectory;

   // Non-null while replicating to a backup; also the "is replicated" flag.
   private ReplicationManager replicator;

   public JournalStorageManager(final Configuration config,
                                final ExecutorFactory executorFactory,
                                final ScheduledExecutorService scheduledExecutorService,
                                final ExecutorFactory ioExecutors) {
      this(config, executorFactory, scheduledExecutorService, ioExecutors, null);
   }

   public JournalStorageManager(final Configuration config,
                                final ExecutorFactory executorFactory,
                                final ExecutorFactory ioExecutors) {
      this(config, executorFactory, null, ioExecutors, null);
   }

   public JournalStorageManager(final Configuration config,
                                final ExecutorFactory executorFactory,
                                final ScheduledExecutorService scheduledExecutorService,
                                final ExecutorFactory ioExecutors,
                                final IOCriticalErrorListener criticalErrorListener) {
      super(config, executorFactory, scheduledExecutorService, ioExecutors, criticalErrorListener);
   }

   public JournalStorageManager(final Configuration config,
                                final ExecutorFactory executorFactory,
                                final ExecutorFactory ioExecutors,
                                final IOCriticalErrorListener criticalErrorListener) {
      super(config, executorFactory, null, ioExecutors, criticalErrorListener);
   }

   /**
    * Builds the bindings journal (NIO), the message journal (NIO or AIO per config),
    * the large-message file factory, and the paging concurrency semaphore.
    * Invoked by the superclass constructor.
    */
   @Override
   protected void init(Configuration config, IOCriticalErrorListener criticalErrorListener) {
      if (config.getJournalType() != JournalType.NIO && config.getJournalType() != JournalType.ASYNCIO) {
         throw ActiveMQMessageBundle.BUNDLE.invalidJournal();
      }

      bindingsFF = new NIOSequentialFileFactory(config.getBindingsLocation(), criticalErrorListener, config.getJournalMaxIO_NIO());
      bindingsFF.setDatasync(config.isJournalDatasync());

      // Bindings journal: fixed 1 MiB file size, min 2 files.
      Journal localBindings = new JournalImpl(ioExecutors, 1024 * 1024, 2, config.getJournalCompactMinFiles(), config.getJournalPoolFiles(), config.getJournalCompactPercentage(), bindingsFF, "activemq-bindings", "bindings", 1, 0);

      bindingsJournal = localBindings;
      originalBindingsJournal = localBindings;

      if (config.getJournalType() == JournalType.ASYNCIO) {
         ActiveMQServerLogger.LOGGER.journalUseAIO();

         journalFF = new AIOSequentialFileFactory(config.getJournalLocation(), config.getJournalBufferSize_AIO(), config.getJournalBufferTimeout_AIO(), config.getJournalMaxIO_AIO(), config.isLogJournalWriteRate(), criticalErrorListener);
      } else if (config.getJournalType() == JournalType.NIO) {
         ActiveMQServerLogger.LOGGER.journalUseNIO();
         journalFF = new NIOSequentialFileFactory(config.getJournalLocation(), true, config.getJournalBufferSize_NIO(), config.getJournalBufferTimeout_NIO(), config.getJournalMaxIO_NIO(), config.isLogJournalWriteRate(), criticalErrorListener);
      } else {
         // Defensive: unreachable given the check at the top of this method.
         throw ActiveMQMessageBundle.BUNDLE.invalidJournalType2(config.getJournalType());
      }

      journalFF.setDatasync(config.isJournalDatasync());

      Journal localMessage = new JournalImpl(ioExecutors, config.getJournalFileSize(), config.getJournalMinFiles(), config.getJournalPoolFiles(), config.getJournalCompactMinFiles(), config.getJournalCompactPercentage(), journalFF, "activemq-data", "amq", config.getJournalType() == JournalType.ASYNCIO ? config.getJournalMaxIO_AIO() : config.getJournalMaxIO_NIO(), 0);

      messageJournal = localMessage;
      originalMessageJournal = localMessage;

      largeMessagesDirectory = config.getLargeMessagesDirectory();

      largeMessagesFactory = new NIOSequentialFileFactory(config.getLargeMessagesLocation(), false, criticalErrorListener, 1);

      perfBlastPages = config.getJournalPerfBlastPages();

      // A value of 1 means "unlimited" here: no semaphore is installed.
      if (config.getPageMaxConcurrentIO() != 1) {
         pageMaxConcurrentIO = new Semaphore(config.getPageMaxConcurrentIO());
      } else {
         pageMaxConcurrentIO = null;
      }
   }

   // Life Cycle Handlers

   /** Creates the bindings/journal/large-message directories and removes stale "tmp" files. */
   @Override
   protected void beforeStart() throws Exception {
      checkAndCreateDir(config.getBindingsLocation(), config.isCreateBindingsDir());
      checkAndCreateDir(config.getJournalLocation(), config.isCreateJournalDir());
      checkAndCreateDir(config.getLargeMessagesLocation(), config.isCreateJournalDir());
      cleanupIncompleteFiles();
   }

   /** Stops the replicator (if any) before the manager itself shuts down. */
   @Override
   protected void beforeStop() throws Exception {
      if (replicator != null) {
         replicator.stop();
      }
   }

   @Override
   public void stop() throws Exception {
      stop(false, true);
   }

   /** @return true while a replicator is installed (i.e. replication is active). */
   public boolean isReplicated() {
      return replicator != null;
   }

   /** Deletes leftover "*.tmp" large-message files from an interrupted previous run. */
   private void cleanupIncompleteFiles() throws Exception {
      if (largeMessagesFactory != null) {
         List<String> tmpFiles = largeMessagesFactory.listFiles("tmp");
         for (String tmpFile : tmpFiles) {
            SequentialFile file = largeMessagesFactory.createSequentialFile(tmpFile);
            file.delete();
         }
      }
   }

   /**
    * Stops the storage manager.
    *
    * @param ioCriticalError when true, skips large-message deletes and ID persistence
    *                        (the journal may be unusable after an I/O failure)
    * @param sendFailover    when true, tells the backup the live is stopping (fail-over)
    */
   @Override
   public synchronized void stop(boolean ioCriticalError, boolean sendFailover) throws Exception {
      if (!started) {
         return;
      }

      if (!ioCriticalError) {
         performCachedLargeMessageDeletes();
         // Must call close to make sure last id is persisted
         if (journalLoaded && idGenerator != null)
            idGenerator.persistCurrentID();
      }

      // Drain the executor: a no-op task plus a bounded wait ensures previously
      // submitted work has run before the journals are stopped.
      final CountDownLatch latch = new CountDownLatch(1);
      try {
         executor.execute(new Runnable() {
            @Override
            public void run() {
               latch.countDown();
            }
         });
         latch.await(30, TimeUnit.SECONDS);
      } catch (RejectedExecutionException ignored) {
         // that's ok: the executor is already shut down, so there is nothing to drain
      }

      // We cache the variable as the replicator could be changed between here and the time we call stop
      // since sendLiveIsStopping may issue a close back from the channel
      // and we want to ensure a stop here just in case
      ReplicationManager replicatorInUse = replicator;
      if (replicatorInUse != null) {
         if (sendFailover) {
            final OperationContext token = replicator.sendLiveIsStopping(ReplicationLiveIsStoppingMessage.LiveStopping.FAIL_OVER);
            if (token != null) {
               try {
                  token.waitCompletion(5000);
               } catch (Exception e) {
                  // ignore it: best-effort notification; shutdown proceeds regardless
               }
            }
         }
         replicatorInUse.stop();
      }
      bindingsJournal.stop();

      messageJournal.stop();

      journalLoaded = false;

      started = false;
   }

   /**
    * Deletes the large-message files queued in {@code largeMessagesToDelete} and
    * notifies the replicator of each deletion.
    * <p>
    * Assumption is that this is only called with a writeLock on the StorageManager.
    */
   @Override
   protected void performCachedLargeMessageDeletes() {
      for (Long largeMsgId : largeMessagesToDelete) {
         SequentialFile msg = createFileForLargeMessage(largeMsgId, LargeMessageExtension.DURABLE);
         try {
            msg.delete();
         } catch (Exception e) {
            // Log and continue: one failed delete must not block the rest of the queue.
            ActiveMQServerLogger.LOGGER.journalErrorDeletingMessage(e, largeMsgId);
         }
         if (replicator != null) {
            replicator.largeMessageDelete(largeMsgId);
         }
      }
      largeMessagesToDelete.clear();
   }

   /** Maps the durable flag to the DURABLE/TEMPORARY file-extension variant. */
   protected SequentialFile createFileForLargeMessage(final long messageID, final boolean durable) {
      if (durable) {
         return createFileForLargeMessage(messageID, LargeMessageExtension.DURABLE);
      } else {
         return createFileForLargeMessage(messageID, LargeMessageExtension.TEMPORARY);
      }
   }

   /**
    * Decodes a large message's journal record, and — for records carrying
    * {@code HDR_ORIG_MESSAGE_ID} — copies the original message file to the new ID
    * if the new file does not yet exist (backward-compatibility safeguard).
    *
    * @param messages unused by this implementation; part of the superclass contract
    * @param buff     the encoded large-message record
    * @return the decoded large message
    * @throws Exception on decode or file I/O failure
    */
   @Override
   protected LargeServerMessage parseLargeMessage(final Map<Long, ServerMessage> messages,
                                                  final ActiveMQBuffer buff) throws Exception {
      LargeServerMessage largeMessage = createLargeMessage();

      LargeMessageEncoding messageEncoding = new LargeMessageEncoding(largeMessage);

      messageEncoding.decode(buff);

      if (largeMessage.containsProperty(Message.HDR_ORIG_MESSAGE_ID)) {
         // for compatibility: couple with old behaviour, copying the old file to avoid message loss
         long originalMessageID = largeMessage.getLongProperty(Message.HDR_ORIG_MESSAGE_ID);

         SequentialFile currentFile = createFileForLargeMessage(largeMessage.getMessageID(), true);

         if (!currentFile.exists()) {
            SequentialFile linkedFile = createFileForLargeMessage(originalMessageID, true);
            if (linkedFile.exists()) {
               linkedFile.copyTo(currentFile);
               linkedFile.close();
            }
         }

         currentFile.close();
      }

      return largeMessage;
   }

   /** Forwards a page-close event to the backup while holding the read lock. */
   @Override
   public void pageClosed(final SimpleString storeName, final int pageNumber) {
      if (isReplicated()) {
         readLock();
         try {
            // Re-check under the lock: replication may have stopped in between.
            if (isReplicated())
               replicator.pageClosed(storeName, pageNumber);
         } finally {
            readUnLock();
         }
      }
   }

   /** Forwards a page-delete event to the backup while holding the read lock. */
   @Override
   public void pageDeleted(final SimpleString storeName, final int pageNumber) {
      if (isReplicated()) {
         readLock();
         try {
            // Re-check under the lock: replication may have stopped in between.
            if (isReplicated())
               replicator.pageDeleted(storeName, pageNumber);
         } finally {
            readUnLock();
         }
      }
   }

   /** Forwards a page write (durable AND non-durable messages) to the backup. */
   @Override
   public void pageWrite(final PagedMessage message, final int pageNumber) {
      if (isReplicated()) {
         // Note: (https://issues.jboss.org/browse/HORNETQ-1059)
         // We have to replicate durable and non-durable messages on paging
         // since acknowledgments are written using the page-position.
         // Say you are sending durable and non-durable messages to a page
         // The ACKs would be done to wrong positions, and the backup would be a mess

         readLock();
         try {
            // Re-check under the lock: replication may have stopped in between.
            if (isReplicated())
               replicator.pageWrite(message, pageNumber);
         } finally {
            readUnLock();
         }
      }
   }

   @Override
   public ByteBuffer allocateDirectBuffer(int size) {
      return journalFF.allocateDirectBuffer(size);
   }

   @Override
   public void freeDirectBuffer(ByteBuffer buffer) {
      journalFF.releaseBuffer(buffer);
   }

   /**
    * Appends a short-lived "pending large message" marker record to the message
    * journal so an incomplete large message can be reclaimed after a restart.
    *
    * @param messageID the large message's ID
    * @return the journal record ID of the pending marker
    */
   public long storePendingLargeMessage(final long messageID) throws Exception {
      readLock();
      try {
         long recordID = generateID();

         messageJournal.appendAddRecord(recordID, JournalRecordIds.ADD_LARGE_MESSAGE_PENDING, new PendingLargeMessageEncoding(messageID), true, getContext(true));

         return recordID;
      } finally {
         readUnLock();
      }
   }

   /**
    * Deletes a large message's backing file (asynchronously when an executor is
    * available). While a backup is still synchronizing, the deletion is deferred by
    * queueing the ID into {@code largeMessagesToDelete} instead.
    */
   // This should be accessed from this package only
   void deleteLargeMessageFile(final LargeServerMessage largeServerMessage) throws ActiveMQException {
      if (largeServerMessage.getPendingRecordID() < 0) {
         try {
            // The delete file happens asynchronously
            // And the client won't be waiting for the actual file to be deleted.
            // We set a temporary record (short lived) on the journal
            // to avoid a situation where the server is restarted and pending large message stays on forever
            largeServerMessage.setPendingRecordID(storePendingLargeMessage(largeServerMessage.getMessageID()));
         } catch (Exception e) {
            throw new ActiveMQInternalErrorException(e.getMessage(), e);
         }
      }
      final SequentialFile file = largeServerMessage.getFile();
      if (file == null) {
         return;
      }

      if (largeServerMessage.isDurable() && isReplicated()) {
         readLock();
         try {
            if (isReplicated() && replicator.isSynchronizing()) {
               // Backup still syncing: defer the delete; it will run via
               // performCachedLargeMessageDeletes() once sync completes.
               synchronized (largeMessagesToDelete) {
                  largeMessagesToDelete.add(Long.valueOf(largeServerMessage.getMessageID()));
                  confirmLargeMessage(largeServerMessage);
               }
               return;
            }
         } finally {
            readUnLock();
         }
      }
      Runnable deleteAction = new Runnable() {
         @Override
         public void run() {
            try {
               readLock();
               try {
                  if (replicator != null) {
                     replicator.largeMessageDelete(largeServerMessage.getMessageID());
                  }
                  file.delete();

                  // The confirm could only be done after the actual delete is done
                  confirmLargeMessage(largeServerMessage);
               } finally {
                  readUnLock();
               }
            } catch (Exception e) {
               ActiveMQServerLogger.LOGGER.journalErrorDeletingMessage(e, largeServerMessage.getMessageID());
            }
         }

      };

      if (executor == null) {
         deleteAction.run();
      } else {
         executor.execute(deleteAction);
      }
   }

   @Override
   public LargeServerMessage createLargeMessage() {
      return new LargeServerMessageImpl(this);
   }

   /**
    * Creates a large message with the given ID, copying headers/properties from the
    * source message, validating its file, and (for durable messages) writing a
    * pending marker record.
    */
   @Override
   public LargeServerMessage createLargeMessage(final long id, final MessageInternal message) throws Exception {
      readLock();
      try {
         if (isReplicated()) {
            replicator.largeMessageBegin(id);
         }

         LargeServerMessageImpl largeMessage = (LargeServerMessageImpl) createLargeMessage();

         largeMessage.copyHeadersAndProperties(message);

         largeMessage.setMessageID(id);

         // We do this here to avoid a case where the replication gets a list without this file
         // to avoid a race
         largeMessage.validateFile();

         if (largeMessage.isDurable()) {
            // We store a marker on the journal that the large file is pending
            long pendingRecordID = storePendingLargeMessage(id);

            largeMessage.setPendingRecordID(pendingRecordID);
         }

         return largeMessage;
      } finally {
         readUnLock();
      }
   }

   /** Builds the large-message file name as "&lt;messageID&gt;&lt;extension&gt;". */
   @Override
   public SequentialFile createFileForLargeMessage(final long messageID, LargeMessageExtension extension) {
      return largeMessagesFactory.createSequentialFile(messageID + extension.getExtension());
   }

   /**
    * Send an entire journal file to a replicating backup server.
    * Stops early (silently) if the manager is shut down mid-transfer.
    */
   private void sendJournalFile(JournalFile[] journalFiles, JournalContent type) throws Exception {
      for (JournalFile jf : journalFiles) {
         if (!started)
            return;
         replicator.syncJournalFile(jf, type);
      }
   }

   /**
    * Rolls the journal to a fresh file (so the data files are stable), announces the
    * sync to the backup, and returns the data files to be copied.
    */
   private JournalFile[] prepareJournalForCopy(Journal journal,
                                               JournalContent contentType,
                                               String nodeID,
                                               boolean autoFailBack) throws Exception {
      journal.forceMoveNextFile();
      JournalFile[] datafiles = journal.getDataFiles();
      replicator.sendStartSyncMessage(datafiles, contentType, nodeID, autoFailBack);
      return datafiles;
   }

   /**
    * Starts replication to a backup: compacts both journals, snapshots their data
    * files, the paging stores and the pending large messages under the write lock,
    * swaps in {@link ReplicatedJournal} wrappers, then streams journal files, large
    * messages and pages to the backup and signals synchronization-done.
    * <p>
    * Lock order inside the write lock: journal synchronization locks, then the
    * paging-manager lock; all released in reverse before streaming begins.
    *
    * @throws ActiveMQIllegalStateException if replication is already active
    */
   @Override
   public void startReplication(ReplicationManager replicationManager,
                                PagingManager pagingManager,
                                String nodeID,
                                final boolean autoFailBack,
                                long initialReplicationSyncTimeout) throws Exception {
      if (!started) {
         throw new IllegalStateException("JournalStorageManager must be started...");
      }
      assert replicationManager != null;

      if (!(messageJournal instanceof JournalImpl) || !(bindingsJournal instanceof JournalImpl)) {
         throw ActiveMQMessageBundle.BUNDLE.notJournalImpl();
      }

      // We first do a compact without any locks, to avoid copying unnecessary data over the network.
      // We do this without holding the storageManager lock, so the journal stays open while compact is being done
      originalMessageJournal.scheduleCompactAndBlock(-1);
      originalBindingsJournal.scheduleCompactAndBlock(-1);

      JournalFile[] messageFiles = null;
      JournalFile[] bindingsFiles = null;

      // We get a picture of the current situation on the large messages
      // and we send the current messages while more state is coming
      Map<Long, Pair<String, Long>> pendingLargeMessages = null;

      try {
         Map<SimpleString, Collection<Integer>> pageFilesToSync;
         storageManagerLock.writeLock().lock();
         try {
            if (isReplicated())
               throw new ActiveMQIllegalStateException("already replicating");
            replicator = replicationManager;

            // Establishes lock
            originalMessageJournal.synchronizationLock();
            originalBindingsJournal.synchronizationLock();

            try {
               originalBindingsJournal.replicationSyncPreserveOldFiles();
               originalMessageJournal.replicationSyncPreserveOldFiles();

               pagingManager.lock();
               try {
                  pagingManager.disableCleanup();
                  messageFiles = prepareJournalForCopy(originalMessageJournal, JournalContent.MESSAGES, nodeID, autoFailBack);
                  bindingsFiles = prepareJournalForCopy(originalBindingsJournal, JournalContent.BINDINGS, nodeID, autoFailBack);
                  pageFilesToSync = getPageInformationForSync(pagingManager);
                  pendingLargeMessages = recoverPendingLargeMessages();
               } finally {
                  pagingManager.unlock();
               }
            } finally {
               originalMessageJournal.synchronizationUnlock();
               originalBindingsJournal.synchronizationUnlock();
            }
            bindingsJournal = new ReplicatedJournal(((byte) 0), originalBindingsJournal, replicator);
            messageJournal = new ReplicatedJournal((byte) 1, originalMessageJournal, replicator);

            // We need to send the list while locking otherwise part of the body might get sent too soon
            // it will send a list of IDs that we are allocating
            replicator.sendLargeMessageIdListMessage(pendingLargeMessages);
         } finally {
            storageManagerLock.writeLock().unlock();
         }

         sendJournalFile(messageFiles, JournalContent.MESSAGES);
         sendJournalFile(bindingsFiles, JournalContent.BINDINGS);
         sendLargeMessageFiles(pendingLargeMessages);
         sendPagesToBackup(pageFilesToSync, pagingManager);

         storageManagerLock.writeLock().lock();
         try {
            if (replicator != null) {
               replicator.sendSynchronizationDone(nodeID, initialReplicationSyncTimeout);
               performCachedLargeMessageDeletes();
            }
         } finally {
            storageManagerLock.writeLock().unlock();
         }
      } catch (Exception e) {
         logger.warn(e.getMessage(), e);
         stopReplication();
         throw e;
      } finally {
         pagingManager.resumeCleanup();
         // Re-enable compact and reclaim of journal files
         originalBindingsJournal.replicationSyncFinished();
         originalMessageJournal.replicationSyncFinished();
      }
   }

   /**
    * Streams the pending large-message files to the backup; skips files that no
    * longer exist and stops early if the manager is shut down.
    */
   private void sendLargeMessageFiles(final Map<Long, Pair<String, Long>> pendingLargeMessages) throws Exception {
      Iterator<Map.Entry<Long, Pair<String, Long>>> iter = pendingLargeMessages.entrySet().iterator();
      while (started && iter.hasNext()) {
         Map.Entry<Long, Pair<String, Long>> entry = iter.next();
         String fileName = entry.getValue().getA();
         final long id = entry.getKey();
         long size = entry.getValue().getB();
         SequentialFile seqFile = largeMessagesFactory.createSequentialFile(fileName);
         if (!seqFile.exists())
            continue;
         if (replicator != null) {
            replicator.syncLargeMessageFile(seqFile, size, id);
         } else {
            throw ActiveMQMessageBundle.BUNDLE.replicatorIsNull();
         }
      }
   }

   /**
    * Snapshots, per paging store, the current page IDs to sync, then forces a new
    * page so subsequent writes land outside the snapshot.
    *
    * @param pagingManager the paging manager to inspect
    * @return map of store name to the page IDs that must be sent to the backup
    */
   private Map<SimpleString, Collection<Integer>> getPageInformationForSync(PagingManager pagingManager) throws Exception {
      Map<SimpleString, Collection<Integer>> info = new HashMap<>();
      for (SimpleString storeName : pagingManager.getStoreNames()) {
         PagingStore store = pagingManager.getPageStore(storeName);
         info.put(storeName, store.getCurrentIds());
         store.forceAnotherPage();
      }
      return info;
   }

   /** Ensures a directory exists, creating it only when {@code create} is true. */
   private void checkAndCreateDir(final File dir, final boolean create) {
      if (!dir.exists()) {
         if (create) {
            if (!dir.mkdirs()) {
               throw new IllegalStateException("Failed to create directory " + dir);
            }
         } else {
            throw ActiveMQMessageBundle.BUNDLE.cannotCreateDir(dir.getAbsolutePath());
         }
      }
   }

   /**
    * Sets a list of large message files into the replicationManager for synchronization.
    * <p>
    * Collects the existing durable ("msg") large-message files with their current
    * sizes, excluding those already queued for deletion, so we know how much of a
    * given message to sync with the backup. Further data appends to the messages
    * will be replicated normally.
    *
    * @return map of large-message ID to (file name, current size)
    * @throws Exception on file-system access failure
    */
   private Map<Long, Pair<String, Long>> recoverPendingLargeMessages() throws Exception {

      Map<Long, Pair<String, Long>> largeMessages = new HashMap<>();
      // only send durable messages...
      // listFiles append a "." to anything...
      List<String> filenames = largeMessagesFactory.listFiles("msg");

      List<Long> idList = new ArrayList<>();
      for (String filename : filenames) {
         Long id = getLargeMessageIdFromFilename(filename);
         if (!largeMessagesToDelete.contains(id)) {
            idList.add(id);
            SequentialFile seqFile = largeMessagesFactory.createSequentialFile(filename);
            long size = seqFile.size();
            largeMessages.put(id, new Pair<>(filename, size));
         }
      }

      return largeMessages;
   }

   /**
    * Streams the snapshotted page files of every paging store to the backup;
    * stops early if the manager is shut down.
    *
    * @param pageFilesToSync result of {@link #getPageInformationForSync}
    */
   private void sendPagesToBackup(Map<SimpleString, Collection<Integer>> pageFilesToSync,
                                  PagingManager manager) throws Exception {
      for (Map.Entry<SimpleString, Collection<Integer>> entry : pageFilesToSync.entrySet()) {
         if (!started)
            return;
         PagingStore store = manager.getPageStore(entry.getKey());
         store.sendPages(replicator, entry.getValue());
      }
   }

   /** Parses the numeric message ID from a large-message file name ("&lt;id&gt;.&lt;ext&gt;"). */
   private long getLargeMessageIdFromFilename(String filename) {
      return Long.parseLong(filename.split("\\.")[0]);
   }

   /**
    * Stops replication by resetting replication-related fields to their 'unreplicated' state:
    * restores the unwrapped journals, stops and clears the replicator, and flushes
    * deferred large-message deletes — all under the write lock.
    */
   @Override
   public void stopReplication() {
      logger.trace("stopReplication()");
      storageManagerLock.writeLock().lock();
      try {
         if (replicator == null)
            return;
         bindingsJournal = originalBindingsJournal;
         messageJournal = originalMessageJournal;
         try {
            replicator.stop();
         } catch (Exception e) {
            ActiveMQServerLogger.LOGGER.errorStoppingReplicationManager(e);
         }
         replicator = null;
         // delete inside the writeLock. Avoids a lot of state checking and races with
         // startReplication.
         // This method should not be called under normal circumstances
         performCachedLargeMessageDeletes();
      } finally {
         storageManagerLock.writeLock().unlock();
      }
   }

   /**
    * Appends bytes at the end of a large-message file and replicates the write
    * when a backup is attached.
    */
   @Override
   public final void addBytesToLargeMessage(final SequentialFile file,
                                            final long messageId,
                                            final byte[] bytes) throws Exception {
      readLock();
      try {
         file.position(file.size());

         file.writeDirect(ByteBuffer.wrap(bytes), false);

         if (isReplicated()) {
            replicator.largeMessageWrite(messageId, bytes);
         }
      } finally {
         readUnLock();
      }
   }

   /** Registers the journal, large-message and bindings directories with the disk monitor. */
   @Override
   public void injectMonitor(FileStoreMonitor monitor) throws Exception {
      if (journalFF != null) {
         monitor.addStore(journalFF.getDirectory());
      }
      if (largeMessagesFactory != null) {
         monitor.addStore(largeMessagesFactory.getDirectory());
      }
      if (bindingsFF != null) {
         monitor.addStore(bindingsFF.getDirectory());
      }
   }
}
package psidev.psi.mi.jami.enricher.impl.minimal; import psidev.psi.mi.jami.bridges.exception.BridgeFailedException; import psidev.psi.mi.jami.bridges.fetcher.PublicationFetcher; import psidev.psi.mi.jami.enricher.PublicationEnricher; import psidev.psi.mi.jami.enricher.exception.EnricherException; import psidev.psi.mi.jami.enricher.impl.AbstractMIEnricher; import psidev.psi.mi.jami.enricher.listener.EnrichmentStatus; import psidev.psi.mi.jami.enricher.listener.PublicationEnricherListener; import psidev.psi.mi.jami.enricher.util.EnricherUtils; import psidev.psi.mi.jami.model.Publication; import psidev.psi.mi.jami.model.Xref; /** * Provides minimal enrichment of Publication. * * - enrich identifiers (pubmed, doi, etc.) of a publication. It will use DefaultXrefComparator to compare identifiers and add missing identifiers without * removing any existing identifiers. * - enrich authors of a publication. It will add all missing authors but will not remove any existing authors * - enrich publication date. It will only enrich the publication date if it is not already set in the publication to enrich. It will not * override any existing publication date * * It will ignore all other properties of a publication * * @author Gabriel Aldam (galdam@ebi.ac.uk) * @since 31/07/13 */ public class MinimalPublicationEnricher extends AbstractMIEnricher<Publication> implements PublicationEnricher { private int retryCount = 5; private PublicationEnricherListener listener = null; private PublicationFetcher fetcher = null; /** * The only constructor. It requires a publication fetcher. * If the publication fetcher is null, an illegal state exception will be thrown at the next enrichment. * * @param fetcher The PublicationFetcher to use. 
*/ public MinimalPublicationEnricher(PublicationFetcher fetcher){ if (fetcher == null){ throw new IllegalArgumentException("The fetcher is required and cannot be null"); } this.fetcher = fetcher; } /** * Gets the publication fetcher which is currently being used to retrieve entries * * @return the current publication fetcher. */ public PublicationFetcher getPublicationFetcher(){ return fetcher; } /** * {@inheritDoc} * * Sets the listener to report publication changes to. * Can be null. */ public void setPublicationEnricherListener(PublicationEnricherListener listener){ this.listener = listener; } /** * Gets the current publication listener * * @return the current publication listener */ public PublicationEnricherListener getPublicationEnricherListener(){ return listener; } /** * <p>Getter for the field <code>retryCount</code>.</p> * * @return a int. */ public int getRetryCount() { return retryCount; } /** * <p>Setter for the field <code>retryCount</code>.</p> * * @param retryCount a int. */ public void setRetryCount(int retryCount) { this.retryCount = retryCount; } /** * The strategy for the enrichment of the publication. * This methods can be overwritten to change the behaviour of the enrichment. * * @param publicationToEnrich The publication which is being enriched. * @param fetchedPublication a {@link psidev.psi.mi.jami.model.Publication} object. * @throws psidev.psi.mi.jami.enricher.exception.EnricherException if any. 
*/ public void processPublication(Publication publicationToEnrich, Publication fetchedPublication) throws EnricherException{ // == PUBMED ID and other identifiers ====================================================================== processIdentifiers(publicationToEnrich, fetchedPublication); // == AUTHORS ======================================================================= processAuthors(publicationToEnrich, fetchedPublication); // == PUBLICATION DATE ============================================================ processPublicationDate(publicationToEnrich, fetchedPublication); // other properties processOtherProperties(publicationToEnrich, fetchedPublication); } /** * <p>processOtherProperties.</p> * * @param publicationToEnrich a {@link psidev.psi.mi.jami.model.Publication} object. * @param fetchedPublication a {@link psidev.psi.mi.jami.model.Publication} object. * @throws psidev.psi.mi.jami.enricher.exception.EnricherException if any. */ protected void processOtherProperties(Publication publicationToEnrich, Publication fetchedPublication) throws EnricherException{ // do nothing } /** * <p>processPublicationDate.</p> * * @param publicationToEnrich a {@link psidev.psi.mi.jami.model.Publication} object. * @param fetched a {@link psidev.psi.mi.jami.model.Publication} object. * @throws psidev.psi.mi.jami.enricher.exception.EnricherException if any. */ protected void processPublicationDate(Publication publicationToEnrich, Publication fetched) throws EnricherException{ if(publicationToEnrich.getPublicationDate() == null && fetched.getPublicationDate() != null) { publicationToEnrich.setPublicationDate(fetched.getPublicationDate()); if(getPublicationEnricherListener() != null) getPublicationEnricherListener().onPublicationDateUpdated(publicationToEnrich , null); } } /** * <p>processAuthors.</p> * * @param publicationToEnrich a {@link psidev.psi.mi.jami.model.Publication} object. * @param fetched a {@link psidev.psi.mi.jami.model.Publication} object. 
* @throws psidev.psi.mi.jami.enricher.exception.EnricherException if any. */ protected void processAuthors(Publication publicationToEnrich, Publication fetched) throws EnricherException{ // only add authors if empty collection. Authors are an ordered list and it does not make sens to complete an author list. Either it is there or it is not. if(!fetched.getAuthors().isEmpty() && publicationToEnrich.getAuthors().isEmpty()){ for(String author : fetched.getAuthors()){ publicationToEnrich.getAuthors().add(author); if(getPublicationEnricherListener() != null) getPublicationEnricherListener().onAuthorAdded(publicationToEnrich , author); } } } /** * <p>processIdentifiers.</p> * * @param publicationToEnrich a {@link psidev.psi.mi.jami.model.Publication} object. * @param fetched a {@link psidev.psi.mi.jami.model.Publication} object. * @throws psidev.psi.mi.jami.enricher.exception.EnricherException if any. */ protected void processIdentifiers(Publication publicationToEnrich, Publication fetched) throws EnricherException{ EnricherUtils.mergeXrefs(publicationToEnrich, publicationToEnrich.getIdentifiers(), fetched.getIdentifiers(), false, true, getPublicationEnricherListener(), getPublicationEnricherListener()); } /** {@inheritDoc} */ @Override public Publication find(Publication publicationToEnrich) throws EnricherException { Publication publicationFetched = null; if(publicationToEnrich.getPubmedId() != null){ publicationFetched = fetchPublication(publicationToEnrich.getPubmedId(), Xref.PUBMED); if(publicationFetched != null) return publicationFetched; } if(publicationToEnrich.getDoi() != null){ publicationFetched = fetchPublication(publicationToEnrich.getDoi(), Xref.DOI); if(publicationFetched != null) return publicationFetched; } return publicationFetched; } /** {@inheritDoc} */ @Override protected void onEnrichedVersionNotFound(Publication publicationToEnrich) throws EnricherException { if(getPublicationEnricherListener() != null) 
getPublicationEnricherListener().onEnrichmentComplete( publicationToEnrich, EnrichmentStatus.FAILED, "No matching publication could be found."); } /** {@inheritDoc} */ @Override public void enrich(Publication publicationToEnrich, Publication publicationFetched) throws EnricherException { processPublication(publicationToEnrich, publicationFetched); if( getPublicationEnricherListener() != null) getPublicationEnricherListener().onEnrichmentComplete(publicationToEnrich , EnrichmentStatus.SUCCESS , "The publication has been successfully enriched"); } private Publication fetchPublication(String id, String db) throws EnricherException { try { return getPublicationFetcher().fetchByIdentifier(id, db); } catch (BridgeFailedException e) { int index = 1; while(index < retryCount){ try { return getPublicationFetcher().fetchByIdentifier(id, db); } catch (BridgeFailedException ee) { ee.printStackTrace(); } index++; } throw new EnricherException("Re-tried "+ retryCount +" times to fetch the Publication but cannot connect to the fetcher.", e); } } }
/*
 * Copyright (C) 2013 OTAPlatform
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.oct.updater.ui.component;

import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.ImageView;
import android.widget.ListView;

import com.oct.updater.R;

/**
 * A {@link ListView} that lets the user reorder rows by dragging them via a
 * per-row "grabber" handle (R.id.grabber). While a drag is in progress a bitmap
 * snapshot of the row floats above the list in its own window; surrounding rows
 * are resized to show the insertion point, and the list auto-scrolls when the
 * drag nears the top or bottom third of the view. Register a
 * {@link DropListener} (and optionally a {@link DragListener}) to receive the
 * reorder events; with neither listener set the view behaves like a plain
 * ListView.
 */
public class TouchInterceptor extends ListView {

    private ImageView mDragView; // floating snapshot of the row being dragged (null when not dragging)
    private WindowManager mWindowManager;
    private WindowManager.LayoutParams mWindowParams;
    private int mDragPos; // which item is being dragged
    private int mFirstDragPos; // where was the dragged item originally
    private int mDragPoint; // at what offset inside the item did the user grab it
    private int mCoordOffset; // the difference between screen coordinates and coordinates in this
                              // view
    private DragListener mDragListener;
    private DropListener mDropListener;
    private int mUpperBound; // y above which the list auto-scrolls up
    private int mLowerBound; // y below which the list auto-scrolls down
    private int mHeight;
    private Rect mTempRect = new Rect();
    private Bitmap mDragBitmap;
    private final int mTouchSlop;
    private int mItemHeightNormal;
    private int mItemHeightExpanded;
    private int mItemHeightHalf;

    public TouchInterceptor(Context context, AttributeSet attrs) {
        super(context, attrs);
        mTouchSlop = ViewConfiguration.get(context).getScaledTouchSlop();
        Resources res = getResources();
        mItemHeightNormal = res.getDimensionPixelSize(R.dimen.normal_height);
        mItemHeightHalf = mItemHeightNormal / 2;
        mItemHeightExpanded = res.getDimensionPixelSize(R.dimen.expanded_height);
    }

    /**
     * Starts a drag when ACTION_DOWN lands on (or near) a row's grabber handle;
     * otherwise defers to the ListView. Returning false here keeps the event
     * stream flowing so onTouchEvent can track the drag.
     */
    @Override
    public boolean onInterceptTouchEvent(MotionEvent ev) {
        if (mDragListener != null || mDropListener != null) {
            switch (ev.getAction()) {
            case MotionEvent.ACTION_DOWN:
                int x = (int) ev.getX();
                int y = (int) ev.getY();
                int itemnum = pointToPosition(x, y);
                if (itemnum == AdapterView.INVALID_POSITION) {
                    break;
                }
                ViewGroup item = (ViewGroup) getChildAt(itemnum - getFirstVisiblePosition());
                mDragPoint = y - item.getTop();
                mCoordOffset = ((int) ev.getRawY()) - y;
                View dragger = item.findViewById(R.id.grabber);
                Rect r = mTempRect;
                if (dragger == null) {
                    return false;
                }
                dragger.getDrawingRect(r);
                // The dragger icon itself is quite small, so pretend the touch area is bigger
                if (x < r.right * 2) {
                    item.setDrawingCacheEnabled(true);
                    // Create a copy of the drawing cache so that it does not get recycled
                    // by the framework when the list tries to clean up memory
                    Bitmap bitmap = Bitmap.createBitmap(item.getDrawingCache());
                    startDragging(bitmap, y);
                    mDragPos = itemnum;
                    mFirstDragPos = mDragPos;
                    mHeight = getHeight();
                    int touchSlop = mTouchSlop;
                    // initial scroll bounds: outer thirds of the view, padded by the touch slop
                    mUpperBound = Math.min(y - touchSlop, mHeight / 3);
                    mLowerBound = Math.max(y + touchSlop, mHeight * 2 / 3);
                    return false;
                }
                stopDragging();
                break;
            }
        }
        return super.onInterceptTouchEvent(ev);
    }

    /*
     * pointToPosition() doesn't consider invisible views, but we need to, so implement a slightly different version.
     */
    private int myPointToPosition(int x, int y) {
        if (y < 0) {
            // when dragging off the top of the screen, calculate position
            // by going back from a visible item
            int pos = myPointToPosition(x, y + mItemHeightNormal);
            if (pos > 0) {
                return pos - 1;
            }
        }
        Rect frame = mTempRect;
        final int count = getChildCount();
        for (int i = count - 1; i >= 0; i--) {
            final View child = getChildAt(i);
            child.getHitRect(frame);
            if (frame.contains(x, y)) {
                return getFirstVisiblePosition() + i;
            }
        }
        return INVALID_POSITION;
    }

    // Maps a raw touch y to the list position the dragged row would be dropped at.
    private int getItemForPosition(int y) {
        int adjustedy = y - mDragPoint - mItemHeightHalf;
        int pos = myPointToPosition(0, adjustedy);
        if (pos >= 0) {
            if (pos <= mFirstDragPos) {
                pos += 1;
            }
        } else if (adjustedy < 0) {
            // this shouldn't happen anymore now that myPointToPosition deals
            // with this situation
            pos = 0;
        }
        return pos;
    }

    // Once the drag moves into the middle third, snap the scroll bounds back to
    // the outer thirds so auto-scroll triggers at the normal positions.
    private void adjustScrollBounds(int y) {
        if (y >= mHeight / 3) {
            mUpperBound = mHeight / 3;
        }
        if (y <= mHeight * 2 / 3) {
            mLowerBound = mHeight * 2 / 3;
        }
    }

    /*
     * Restore size and visibility for all listitems
     */
    private void unExpandViews(boolean deletion) {
        for (int i = 0;; i++) {
            View v = getChildAt(i);
            if (v == null) {
                if (deletion) {
                    // HACK force update of mItemCount
                    // NOTE(review): getChildAt(0) could be null here if the list is empty - confirm callers
                    int position = getFirstVisiblePosition();
                    int y = getChildAt(0).getTop();
                    setAdapter(getAdapter());
                    setSelectionFromTop(position, y);
                    // end hack
                }
                layoutChildren(); // force children to be recreated where needed
                v = getChildAt(i);
                if (v == null) {
                    break;
                }
            }
            ViewGroup.LayoutParams params = v.getLayoutParams();
            params.height = mItemHeightNormal;
            v.setLayoutParams(params);
            v.setVisibility(View.VISIBLE);
        }
    }

    /*
     * Adjust visibility and size to make it appear as though an item is being dragged around and other items are making
     * room for it: If dropping the item would result in it still being in the same place, then make the dragged
     * listitem's size normal, but make the item invisible. Otherwise, if the dragged listitem is still on screen, make
     * it as small as possible and expand the item below the insert point. If the dragged item is not on screen, only
     * expand the item below the current insertpoint.
     */
    private void doExpansion() {
        int childnum = mDragPos - getFirstVisiblePosition();
        if (mDragPos > mFirstDragPos) {
            childnum++;
        }
        View first = getChildAt(mFirstDragPos - getFirstVisiblePosition());
        for (int i = 0;; i++) {
            View vv = getChildAt(i);
            if (vv == null) {
                break;
            }
            int height = mItemHeightNormal;
            int visibility = View.VISIBLE;
            if (vv.equals(first)) {
                // processing the item that is being dragged
                if (mDragPos == mFirstDragPos) {
                    // hovering over the original location
                    visibility = View.INVISIBLE;
                } else {
                    // not hovering over it
                    height = 1;
                }
            } else if (i == childnum) {
                if (mDragPos < getCount() - 1) {
                    height = mItemHeightExpanded;
                }
            }
            ViewGroup.LayoutParams params = vv.getLayoutParams();
            params.height = height;
            vv.setLayoutParams(params);
            vv.setVisibility(visibility);
        }
    }

    /**
     * Tracks an active drag: moves the floating view on ACTION_MOVE, auto-scrolls
     * near the edges, and commits the reorder via the DropListener on
     * ACTION_UP/ACTION_CANCEL. Consumes all events while a drag is active.
     */
    @Override
    public boolean onTouchEvent(MotionEvent ev) {
        if ((mDragListener != null || mDropListener != null) && mDragView != null) {
            int action = ev.getAction();
            switch (action) {
            case MotionEvent.ACTION_UP:
            case MotionEvent.ACTION_CANCEL:
                Rect r = mTempRect;
                mDragView.getDrawingRect(r);
                stopDragging();
                if (mDropListener != null && mDragPos >= 0 && mDragPos < getCount()) {
                    mDropListener.drop(mFirstDragPos, mDragPos);
                }
                unExpandViews(false);
                break;

            case MotionEvent.ACTION_DOWN:
            case MotionEvent.ACTION_MOVE:
                int x = (int) ev.getX();
                int y = (int) ev.getY();
                dragView(x, y);
                int itemnum = getItemForPosition(y);
                if (itemnum >= 0) {
                    if (action == MotionEvent.ACTION_DOWN || itemnum != mDragPos) {
                        if (mDragListener != null) {
                            mDragListener.drag(mDragPos, itemnum);
                        }
                        mDragPos = itemnum;
                        doExpansion();
                    }
                    int speed = 0;
                    adjustScrollBounds(y);
                    if (y > mLowerBound) {
                        // scroll the list up a bit
                        speed = y > (mHeight + mLowerBound) / 2 ? 16 : 4;
                    } else if (y < mUpperBound) {
                        // scroll the list down a bit
                        speed = y < mUpperBound / 2 ? -16 : -4;
                    }
                    if (speed != 0) {
                        int ref = pointToPosition(0, mHeight / 2);
                        if (ref == AdapterView.INVALID_POSITION) {
                            // we hit a divider or an invisible view, check somewhere else
                            ref = pointToPosition(0, mHeight / 2 + getDividerHeight() + 64);
                        }
                        View v = getChildAt(ref - getFirstVisiblePosition());
                        if (v != null) {
                            int pos = v.getTop();
                            setSelectionFromTop(ref, pos - speed);
                        }
                    }
                }
                break;
            }
            return true;
        }
        return super.onTouchEvent(ev);
    }

    // Creates the floating drag view: an ImageView showing the row snapshot,
    // added directly to the WindowManager so it can move outside the list bounds.
    private void startDragging(Bitmap bm, int y) {
        stopDragging();

        mWindowParams = new WindowManager.LayoutParams();
        mWindowParams.gravity = Gravity.TOP;
        mWindowParams.x = 0;
        mWindowParams.y = y - mDragPoint + mCoordOffset;

        mWindowParams.height = WindowManager.LayoutParams.WRAP_CONTENT;
        mWindowParams.width = WindowManager.LayoutParams.WRAP_CONTENT;
        mWindowParams.flags = WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE
                | WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE | WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON
                | WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN | WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS;
        mWindowParams.format = PixelFormat.TRANSLUCENT;
        mWindowParams.windowAnimations = 0;

        Context context = getContext();
        ImageView v = new ImageView(context);
        int backGroundColor = 0xE0103010; // translucent dark green behind the snapshot
        v.setBackgroundColor(backGroundColor);
        v.setImageBitmap(bm);
        mDragBitmap = bm;

        // NOTE(review): magic string - Context.WINDOW_SERVICE would be clearer; confirm before changing
        mWindowManager = (WindowManager) context.getSystemService("window");
        mWindowManager.addView(v, mWindowParams);
        mDragView = v;
    }

    // Repositions the floating view to follow the finger (x is currently unused).
    private void dragView(int x, int y) {
        mWindowParams.y = y - mDragPoint + mCoordOffset;
        mWindowManager.updateViewLayout(mDragView, mWindowParams);
    }

    // Removes the floating view and recycles the snapshot bitmap; safe to call
    // when no drag is active.
    private void stopDragging() {
        if (mDragView != null) {
            WindowManager wm = (WindowManager) getContext().getSystemService("window");
            wm.removeView(mDragView);
            mDragView.setImageDrawable(null);
            mDragView = null;
        }
        if (mDragBitmap != null) {
            mDragBitmap.recycle();
            mDragBitmap = null;
        }
    }

    public void setDragListener(DragListener l) {
        mDragListener = l;
    }

    public void setDropListener(DropListener l) {
        mDropListener = l;
    }

    /** Notified continuously while a row is dragged over new positions. */
    public interface DragListener {
        void drag(int from, int to);
    }

    /** Notified once when the drag ends; implementors perform the actual reorder. */
    public interface DropListener {
        void drop(int from, int to);
    }
}
package code.smell.detection.textual.analysis.ir;

/**
 * Implementation of the Porter suffix-stripping (stemming) algorithm.
 * Usage: feed characters with {@link #add}, call {@link #stem()}, then read the
 * result via {@link #toString()} or {@link #getResultBuffer()} /
 * {@link #getResultLength()}. The instance is reusable: stem() resets it for
 * the next word. Input is expected to be lower-case ASCII letters.
 */
public class PottersStemmer {
	private char[] b;  // working buffer holding the word
	private int i; /* offset into b */
	private int iEnd; /* offset to end of stemmed word */
	private int j;    // general offset used by ends()/setto() during a step
	private int k;    // offset of the last character of the current stem
	private static final int INC = 50; /* unit of size whereby b is increased */

	public PottersStemmer() {
		b = new char[INC];
		i = 0;
		iEnd = 0;
	}

	/**
	 * Add a character to the word being stemmed. When you are finished adding
	 * characters, you can call stem(void) to stem the word.
	 */
	public void add(char ch) {
		if (i == b.length) {
			// grow the buffer by INC and copy the existing characters over
			char[] new_b = new char[i + INC];
			for (int c = 0; c < i; c++)
				new_b[c] = b[c];
			b = new_b;
		}
		b[i++] = ch;
	}

	/**
	 * Adds wLen characters to the word being stemmed contained in a portion of a
	 * char[] array. This is like repeated calls of add(char ch), but faster.
	 */
	public void add(char[] w, int wLen) {
		if (i + wLen >= b.length) {
			char[] new_b = new char[i + wLen + INC];
			for (int c = 0; c < i; c++)
				new_b[c] = b[c];
			b = new_b;
		}
		for (int c = 0; c < wLen; c++)
			b[i++] = w[c];
	}

	/**
	 * After a word has been stemmed, it can be retrieved by toString(), or a
	 * reference to the internal buffer can be retrieved by getResultBuffer and
	 * getResultLength (which is generally more efficient.)
	 */
	public String toString() {
		return new String(b, 0, iEnd);
	}

	/**
	 * Returns the length of the word resulting from the stemming process.
	 */
	public int getResultLength() {
		return iEnd;
	}

	/**
	 * Returns a reference to a character buffer containing the results of the
	 * stemming process. You also need to consult getResultLength() to determine the
	 * length of the result.
	 */
	public char[] getResultBuffer() {
		return b;
	}

	/* cons(i) is true <=> b[i] is a consonant. */
	private final boolean cons(int i) {
		switch (b[i]) {
		case 'a':
		case 'e':
		case 'i':
		case 'o':
		case 'u':
			return false;
		case 'y':
			// 'y' is a consonant at position 0, otherwise a consonant only when
			// the previous letter is a vowel
			return (i == 0) ? true : !cons(i - 1);
		default:
			return true;
		}
	}

	/*
	 * m() measures the number of consonant sequences between 0 and j. if c is a
	 * consonant sequence and v a vowel sequence, and <..> indicates arbitrary
	 * presence,
	 *
	 * <c><v> gives 0 <c>vc<v> gives 1 <c>vcvc<v> gives 2 <c>vcvcvc<v> gives 3 ....
	 */
	private final int m() {
		int n = 0;
		int i = 0; // local scan index (intentionally shadows the field)
		while (true) {
			if (i > j)
				return n;
			if (!cons(i))
				break;
			i++;
		}
		i++;
		while (true) {
			while (true) {
				if (i > j)
					return n;
				if (cons(i))
					break;
				i++;
			}
			i++;
			n++;
			while (true) {
				if (i > j)
					return n;
				if (!cons(i))
					break;
				i++;
			}
			i++;
		}
	}

	/* vowelinstem() is true <=> 0,...j contains a vowel */
	private final boolean vowelinstem() {
		int i;
		for (i = 0; i <= j; i++)
			if (!cons(i))
				return true;
		return false;
	}

	/* doublec(j) is true <=> j,(j-1) contain a double consonant. */
	private final boolean doublec(int j) {
		if (j < 1)
			return false;
		if (b[j] != b[j - 1])
			return false;
		return cons(j);
	}

	/*
	 * cvc(i) is true <=> i-2,i-1,i has the form consonant - vowel - consonant and
	 * also if the second c is not w,x or y. this is used when trying to restore an
	 * e at the end of a short word. e.g.
	 *
	 * cav(e), lov(e), hop(e), crim(e), but snow, box, tray.
	 *
	 */
	private final boolean cvc(int i) {
		if (i < 2 || !cons(i) || cons(i - 1) || !cons(i - 2))
			return false;
		{
			int ch = b[i];
			if (ch == 'w' || ch == 'x' || ch == 'y')
				return false;
		}
		return true;
	}

	/* ends(s) is true <=> the stem 0,...k ends with s; as a side effect sets j. */
	private final boolean ends(String s) {
		int l = s.length();
		int o = k - l + 1;
		if (o < 0)
			return false;
		for (int i = 0; i < l; i++)
			if (b[o + i] != s.charAt(i))
				return false;
		j = k - l;
		return true;
	}

	/*
	 * setto(s) sets (j+1),...k to the characters in the string s, readjusting k.
	 */
	private final void setto(String s) {
		int l = s.length();
		int o = j + 1;
		for (int i = 0; i < l; i++)
			b[o + i] = s.charAt(i);
		k = j + l;
	}

	/* r(s) is used further down. Applies setto(s) only when m() > 0. */
	private final void r(String s) {
		if (m() > 0)
			setto(s);
	}

	/*
	 * step1() gets rid of plurals and -ed or -ing. e.g.
	 *
	 * caresses -> caress ponies -> poni ties -> ti caress -> caress cats -> cat
	 *
	 * feed -> feed agreed -> agree disabled -> disable
	 *
	 * matting -> mat mating -> mate meeting -> meet milling -> mill messing -> mess
	 *
	 * meetings -> meet
	 *
	 */
	private final void step1() {
		if (b[k] == 's') {
			if (ends("sses"))
				k -= 2;
			else if (ends("ies"))
				setto("i");
			else if (b[k - 1] != 's')
				k--;
		}
		if (ends("eed")) {
			if (m() > 0)
				k--;
		} else if ((ends("ed") || ends("ing")) && vowelinstem()) {
			k = j;
			if (ends("at"))
				setto("ate");
			else if (ends("bl"))
				setto("ble");
			else if (ends("iz"))
				setto("ize");
			else if (doublec(k)) {
				k--;
				{
					int ch = b[k];
					// keep the double letter for l, s, z (e.g. fall, mess, fizz)
					if (ch == 'l' || ch == 's' || ch == 'z')
						k++;
				}
			} else if (m() == 1 && cvc(k))
				setto("e");
		}
	}

	/* step2() turns terminal y to i when there is another vowel in the stem. */
	private final void step2() {
		if (ends("y") && vowelinstem())
			b[k] = 'i';
	}

	/*
	 * step3() maps double suffices to single ones. so -ization ( = -ize plus
	 * -ation) maps to -ize etc. note that the string before the suffix must give
	 * m() > 0.
	 */
	private final void step3() {
		if (k == 0)
			return; /* For Bug 1 */
		switch (b[k - 1]) {
		case 'a':
			if (ends("ational")) {
				r("ate");
				break;
			}
			if (ends("tional")) {
				r("tion");
				break;
			}
			break;
		case 'c':
			if (ends("enci")) {
				r("ence");
				break;
			}
			if (ends("anci")) {
				r("ance");
				break;
			}
			break;
		case 'e':
			if (ends("izer")) {
				r("ize");
				break;
			}
			break;
		case 'l':
			if (ends("bli")) {
				r("ble");
				break;
			}
			if (ends("alli")) {
				r("al");
				break;
			}
			if (ends("entli")) {
				r("ent");
				break;
			}
			if (ends("eli")) {
				r("e");
				break;
			}
			if (ends("ousli")) {
				r("ous");
				break;
			}
			break;
		case 'o':
			if (ends("ization")) {
				r("ize");
				break;
			}
			if (ends("ation")) {
				r("ate");
				break;
			}
			if (ends("ator")) {
				r("ate");
				break;
			}
			break;
		case 's':
			if (ends("alism")) {
				r("al");
				break;
			}
			if (ends("iveness")) {
				r("ive");
				break;
			}
			if (ends("fulness")) {
				r("ful");
				break;
			}
			if (ends("ousness")) {
				r("ous");
				break;
			}
			break;
		case 't':
			if (ends("aliti")) {
				r("al");
				break;
			}
			if (ends("iviti")) {
				r("ive");
				break;
			}
			if (ends("biliti")) {
				r("ble");
				break;
			}
			break;
		case 'g':
			if (ends("logi")) {
				r("log");
				break;
			}
		}
	}

	/* step4() deals with -ic-, -full, -ness etc. similar strategy to step3. */
	private final void step4() {
		switch (b[k]) {
		case 'e':
			if (ends("icate")) {
				r("ic");
				break;
			}
			if (ends("ative")) {
				r("");
				break;
			}
			if (ends("alize")) {
				r("al");
				break;
			}
			break;
		case 'i':
			if (ends("iciti")) {
				r("ic");
				break;
			}
			break;
		case 'l':
			if (ends("ical")) {
				r("ic");
				break;
			}
			if (ends("ful")) {
				r("");
				break;
			}
			break;
		case 's':
			if (ends("ness")) {
				r("");
				break;
			}
			break;
		}
	}

	/* step5() takes off -ant, -ence etc., in context <c>vcvc<v>. */
	private final void step5() {
		if (k == 0)
			return; /* for Bug 1 */
		switch (b[k - 1]) {
		case 'a':
			if (ends("al"))
				break;
			return;
		case 'c':
			if (ends("ance"))
				break;
			if (ends("ence"))
				break;
			return;
		case 'e':
			if (ends("er"))
				break;
			return;
		case 'i':
			if (ends("ic"))
				break;
			return;
		case 'l':
			if (ends("able"))
				break;
			if (ends("ible"))
				break;
			return;
		case 'n':
			if (ends("ant"))
				break;
			if (ends("ement"))
				break;
			if (ends("ment"))
				break;
			/* element etc. not stripped before the m */
			if (ends("ent"))
				break;
			return;
		case 'o':
			if (ends("ion") && j >= 0 && (b[j] == 's' || b[j] == 't'))
				break;
			/* j >= 0 fixes Bug 2 */
			if (ends("ou"))
				break;
			return;
			/* takes care of -ous */
		case 's':
			if (ends("ism"))
				break;
			return;
		case 't':
			if (ends("ate"))
				break;
			if (ends("iti"))
				break;
			return;
		case 'u':
			if (ends("ous"))
				break;
			return;
		case 'v':
			if (ends("ive"))
				break;
			return;
		case 'z':
			if (ends("ize"))
				break;
			return;
		default:
			return;
		}
		// a suffix matched above; strip it only when the remaining stem has m() > 1
		if (m() > 1)
			k = j;
	}

	/* step6() removes a final -e if m() > 1. */
	private final void step6() {
		j = k;
		if (b[k] == 'e') {
			int a = m();
			if (a > 1 || a == 1 && !cvc(k - 1))
				k--;
		}
		// also reduce a final double 'l' (e.g. "controll" -> "control") when m() > 1
		if (b[k] == 'l' && doublec(k) && m() > 1)
			k--;
	}

	/**
	 * Stem the word placed into the Stemmer buffer through calls to add(). You
	 * can retrieve the result with getResultLength()/getResultBuffer() or
	 * toString(). Words of length 1 or 2 are returned unchanged. Resets the
	 * buffer so the instance can be reused for the next word.
	 */
	public void stem() {
		k = i - 1;
		if (k > 1) {
			step1();
			step2();
			step3();
			step4();
			step5();
			step6();
		}
		iEnd = k + 1;
		i = 0;
	}
}
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.psi.impl; import com.intellij.diagnostic.ThreadDumper; import com.intellij.openapi.Disposable; import com.intellij.openapi.application.ApplicationAdapter; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.application.ex.ApplicationEx; import com.intellij.openapi.components.ServiceManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.progress.util.StandardProgressIndicatorBase; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.FileViewProvider; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.PsiFile; import com.intellij.util.Processor; import com.intellij.util.SmartList; import com.intellij.util.containers.Queue; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.TestOnly; import javax.swing.*; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; public class DocumentCommitThread extends 
DocumentCommitProcessor implements Runnable, Disposable { private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.DocumentCommitThread"); private static final String NAME = "Document commit thread"; private final Queue<CommitTask> documentsToCommit = new Queue<CommitTask>(10); private final List<CommitTask> documentsToApplyInEDT = new ArrayList<CommitTask>(10); // guarded by documentsToCommit private final ApplicationEx myApplication; private volatile boolean isDisposed; private CommitTask currentTask; // guarded by documentsToCommit private volatile boolean threadFinished; private volatile boolean myEnabled; // true if we can do commits. set to false temporarily during the write action. private int runningWriteActions; // accessed in EDT only public static DocumentCommitThread getInstance() { return ServiceManager.getService(DocumentCommitThread.class); } public DocumentCommitThread(final ApplicationEx application) { myApplication = application; // install listener in EDT to avoid missing events in case we are inside write action right now application.invokeLater(new Runnable() { @Override public void run() { assert runningWriteActions == 0; if (application.isDisposed()) return; assert !application.isWriteAccessAllowed(); application.addApplicationListener(new ApplicationAdapter() { @Override public void beforeWriteActionStart(Object action) { int writeActionsBefore = runningWriteActions++; if (writeActionsBefore == 0) { disable("Write action started: " + action); } else { log("before write action: " + action + "; " + writeActionsBefore + " write actions already running", null, false); } } @Override public void writeActionFinished(Object action) { // crazy things happen when running tests, like starting write action in one thread but firing its end in the other int writeActionsAfter = runningWriteActions = Math.max(0,runningWriteActions-1); if (writeActionsAfter == 0) { enable("Write action finished: " + action); } else { log("after write 
action: " + action + "; " + writeActionsAfter + " write actions still running", null, false); if (writeActionsAfter < 0) { System.err.println("mismatched listeners: " + writeActionsAfter + ";\n==== log==="+log+"\n====end log==="+ ";\n=======threaddump====\n" + ThreadDumper.dumpThreadsToString()+"\n=====END threaddump======="); clearLog(); assert false; } } } }, DocumentCommitThread.this); enable("Listener installed, started"); } }); log("Starting thread", null, false); Thread thread = new Thread(this, NAME); thread.setDaemon(true); thread.start(); } @Override public void dispose() { isDisposed = true; synchronized (documentsToCommit) { documentsToCommit.clear(); } cancel("Stop thread"); wakeUpQueue(); while (!threadFinished) { wakeUpQueue(); synchronized (documentsToCommit) { try { documentsToCommit.wait(10); } catch (InterruptedException ignored) { } } } } private void disable(@NonNls Object reason) { // write action has just started, all commits are useless cancel(reason); myEnabled = false; log("Disabled", null, false, reason); } private void enable(@NonNls Object reason) { myEnabled = true; wakeUpQueue(); log("Enabled", null, false, reason); } private void wakeUpQueue() { synchronized (documentsToCommit) { documentsToCommit.notifyAll(); } } private void cancel(@NonNls @NotNull Object reason) { startNewTask(null, reason); } @Override public void commitAsynchronously(@NotNull final Project project, @NotNull final Document document, @NonNls @NotNull Object reason, @NotNull ModalityState currentModalityState) { queueCommit(project, document, reason, currentModalityState); } void queueCommit(@NotNull final Project project, @NotNull final Document document, @NonNls @NotNull Object reason, @NotNull ModalityState currentModalityState) { assert !isDisposed : "already disposed"; if (!project.isInitialized()) return; PsiFile psiFile = PsiDocumentManager.getInstance(project).getCachedPsiFile(document); if (psiFile == null) return; doQueue(project, document, reason, 
currentModalityState); } private void doQueue(@NotNull Project project, @NotNull Document document, @NotNull Object reason, @NotNull ModalityState currentModalityState) { synchronized (documentsToCommit) { ProgressIndicator indicator = createProgressIndicator(); CommitTask newTask = new CommitTask(document, project, indicator, reason, currentModalityState); markRemovedFromDocsToCommit(newTask); markRemovedCurrentTask(newTask); removeFromDocsToApplyInEDT(newTask); documentsToCommit.addLast(newTask); log("Queued", newTask, false, reason); wakeUpQueue(); } } final StringBuilder log = new StringBuilder(); @Override public void log(@NonNls String msg, @Nullable CommitTask task, boolean synchronously, @NonNls Object... args) { if (true) return; String indent = new SimpleDateFormat("hh:mm:ss:SSSS").format(new Date()) + (SwingUtilities.isEventDispatchThread() ? "-(EDT) " : Thread.currentThread().getName().equals(NAME) ? "-(DCT) " : "- "); @NonNls String s = indent + msg + (synchronously ? " (sync)" : "") + (task == null ? 
" - " : "; task: " + task+" ("+System.identityHashCode(task)+")"); for (Object arg : args) { if (!StringUtil.isEmpty(String.valueOf(arg))) { s += "; "+arg; } } if (task != null) { boolean stillUncommitted = !task.project.isDisposed() && ((PsiDocumentManagerBase)PsiDocumentManager.getInstance(task.project)).isInUncommittedSet(task.document); if (stillUncommitted) { s += "; Uncommitted: " + task.document; } } // System.err.println(s); synchronized (log) { log.append(s).append("\n"); if (log.length() > 100000) { log.delete(0, log.length()-50000); } } } // cancels all pending commits @TestOnly private void cancelAll() { synchronized (documentsToCommit) { cancel("cancel all in tests"); markRemovedFromDocsToCommit(null); documentsToCommit.clear(); removeFromDocsToApplyInEDT(null); markRemovedCurrentTask(null); } } @TestOnly public void clearQueue() { cancelAll(); clearLog(); wakeUpQueue(); } private void clearLog() { synchronized (log) { log.setLength(0); } } private void markRemovedCurrentTask(@Nullable CommitTask newTask) { CommitTask task = currentTask; if (task != null && (newTask == null || task.equals(newTask))) { task.removed = true; cancel("Sync commit intervened"); } } private void removeFromDocsToApplyInEDT(@Nullable("null means all") CommitTask newTask) { for (int i = documentsToApplyInEDT.size() - 1; i >= 0; i--) { CommitTask task = documentsToApplyInEDT.get(i); if (newTask == null || task.equals(newTask)) { task.removed = true; documentsToApplyInEDT.remove(i); log("Marked and Removed from EDT apply queue (sync commit called)", task, true); } } } private void markRemovedFromDocsToCommit(@Nullable("null means all") final CommitTask newTask) { processAll(new Processor<CommitTask>() { @Override public boolean process(CommitTask task) { if (newTask == null || task.equals(newTask)) { task.removed = true; log("marker as Removed in background queue", task, true); } return true; } }); } @Override public void run() { threadFinished = false; try { while (!isDisposed) { 
try { pollQueue(); } catch(Throwable e) { LOG.error(e); } } } finally { threadFinished = true; } // ping the thread waiting for close wakeUpQueue(); log("Good bye", null, false); } private void pollQueue() { boolean success = false; Document document = null; Project project = null; CommitTask task = null; try { ProgressIndicator indicator; synchronized (documentsToCommit) { if (!myEnabled || documentsToCommit.isEmpty()) { documentsToCommit.wait(1000); return; } task = documentsToCommit.pullFirst(); document = task.document; indicator = task.indicator; project = task.project; log("Pulled", task, false, indicator); if (project.isDisposed() || !((PsiDocumentManagerBase)PsiDocumentManager.getInstance(project)).isInUncommittedSet(document)) { log("Abandon and proceed to next",task, false); return; } if (task.removed) { return; // document has been marked as removed, e.g. by synchronous commit } startNewTask(task, "Pulled new task"); // transfer to documentsToApplyInEDT documentsToApplyInEDT.add(task); } Runnable finishRunnable = null; if (indicator.isCanceled()) { success = false; } else { final CommitTask commitTask = task; final Runnable[] result = new Runnable[1]; ProgressManager.getInstance().executeProcessUnderProgress(new Runnable() { @Override public void run() { result[0] = commitUnderProgress(commitTask, false); } }, commitTask.indicator); finishRunnable = result[0]; success = finishRunnable != null; log("commit returned", task, false, finishRunnable, indicator); } if (success) { assert !myApplication.isDispatchThread(); myApplication.invokeLater(finishRunnable, task.myCreationModalityState); log("Invoked later finishRunnable", task, false, finishRunnable, indicator); } } catch (ProcessCanceledException e) { cancel(e); // leave queue unchanged log("PCE", task, false, e); success = false; } catch (InterruptedException e) { // app must be closing log("IE", task, false, e); cancel(e); } catch (Throwable e) { LOG.error(e); cancel(e); } synchronized 
(documentsToCommit) { if (!success && !task.removed) { // sync commit has not intervened // reset status for queue back successfully doQueue(project, document, "re-added on failure", task.myCreationModalityState); } currentTask = null; // do not cancel, it's being invokeLatered } } @Override public void commitSynchronously(@NotNull Document document, @NotNull Project project) { assert !isDisposed; myApplication.assertWriteAccessAllowed(); if (!project.isInitialized() && !project.isDefault()) { @NonNls String s = project + "; Disposed: "+project.isDisposed()+"; Open: "+project.isOpen(); try { Disposer.dispose(project); } catch (Throwable ignored) { // do not fill log with endless exceptions } throw new RuntimeException(s); } ProgressIndicator indicator = createProgressIndicator(); CommitTask task = new CommitTask(document, project, indicator, "Sync commit", ModalityState.current()); synchronized (documentsToCommit) { markRemovedFromDocsToCommit(task); markRemovedCurrentTask(task); removeFromDocsToApplyInEDT(task); } log("About to commit sync", task, true, indicator); Runnable finish = commitUnderProgress(task, true); log("Committed sync", task, true, finish, indicator); assert finish != null; finish.run(); // let our thread know that queue must be polled again wakeUpQueue(); } @NotNull @Override protected ProgressIndicator createProgressIndicator() { return new StandardProgressIndicatorBase(); } private void startNewTask(@Nullable CommitTask task, @NotNull Object reason) { synchronized (documentsToCommit) { // sync to prevent overwriting CommitTask cur = currentTask; if (cur != null) { cur.indicator.cancel(); } currentTask = task; } log("new task started", task, false, reason); } // returns finish commit Runnable (to be invoked later in EDT), or null on failure @Nullable private Runnable commitUnderProgress(@NotNull final CommitTask task, final boolean synchronously) { final Project project = task.project; final Document document = task.document; final 
List<Processor<Document>> finishProcessors = new SmartList<Processor<Document>>(); Runnable runnable = new Runnable() { @Override public void run() { myApplication.assertReadAccessAllowed(); if (project.isDisposed()) return; final PsiDocumentManagerBase documentManager = (PsiDocumentManagerBase)PsiDocumentManager.getInstance(project); if (documentManager.isCommitted(document)) return; FileViewProvider viewProvider = documentManager.getCachedViewProvider(document); if (viewProvider == null) { finishProcessors.add(handleCommitWithoutPsi(documentManager, document, task, synchronously)); return; } List<PsiFile> psiFiles = viewProvider.getAllFiles(); for (PsiFile file : psiFiles) { if (file.isValid()) { Processor<Document> finishProcessor = doCommit(task, file, synchronously); if (finishProcessor != null) { finishProcessors.add(finishProcessor); } } } } }; if (synchronously) { myApplication.assertWriteAccessAllowed(); runnable.run(); } else if (!myApplication.tryRunReadAction(runnable)) { log("Could not start read action", task, synchronously, myApplication.isReadAccessAllowed(), Thread.currentThread()); return null; } boolean canceled = task.indicator.isCanceled(); assert !synchronously || !canceled; if (canceled || task.removed) { return null; } Runnable finishRunnable = new Runnable() { @Override public void run() { myApplication.assertIsDispatchThread(); Project project = task.project; if (project.isDisposed()) return; Document document = task.document; synchronized (documentsToCommit) { boolean isValid = !task.removed; for (int i = documentsToApplyInEDT.size() - 1; i >= 0; i--) { CommitTask queuedTask = documentsToApplyInEDT.get(i); boolean taskIsValid = !queuedTask.removed; if (task == queuedTask) { // find the same task in the queue documentsToApplyInEDT.remove(i); isValid &= taskIsValid; log("Task matched, removed from documentsToApplyInEDT", queuedTask, false, task); } else if (!taskIsValid) { documentsToApplyInEDT.remove(i); log("Task invalid, removed from 
documentsToApplyInEDT", queuedTask, false); } } if (!isValid) { log("Marked as already committed in EDT apply queue, return", task, true); return; } } PsiDocumentManagerBase documentManager = (PsiDocumentManagerBase)PsiDocumentManager.getInstance(project); log("Executing later finishCommit", task, false); boolean success = documentManager.finishCommit(document, finishProcessors, synchronously, task.reason); if (synchronously) { assert success; } log("after call finishCommit",task, synchronously, success); if (synchronously || success) { assert !documentManager.isInUncommittedSet(document); } if (!success) { // add document back to the queue queueCommit(project, document, "Re-added back", task.myCreationModalityState); } } }; return finishRunnable; } @NotNull private Processor<Document> handleCommitWithoutPsi(@NotNull final PsiDocumentManagerBase documentManager, @NotNull Document document, @NotNull final CommitTask task, final boolean synchronously) { final long startDocModificationTimeStamp = document.getModificationStamp(); return new Processor<Document>() { @Override public boolean process(Document document) { log("Finishing without PSI", task, synchronously, document.getModificationStamp(), startDocModificationTimeStamp); if (document.getModificationStamp() != startDocModificationTimeStamp || documentManager.getCachedViewProvider(document) != null) { return false; } documentManager.handleCommitWithoutPsi(document); return true; } }; } private boolean processAll(final Processor<CommitTask> processor) { final boolean[] result = {true}; synchronized (documentsToCommit) { documentsToCommit.process(new Processor<CommitTask>() { @Override public boolean process(CommitTask commitTask) { result[0] &= processor.process(commitTask); return true; } }); } return result[0]; } @TestOnly boolean isEnabled() { return myEnabled; } @Override public String toString() { return "Document commit thread; application: "+myApplication+"; isDisposed: "+isDisposed+"; threadFinished: 
"+threadFinished+"; myEnabled: "+myEnabled+"; runningWriteActions: "+runningWriteActions; } }
/********************************************************************************** * $URL$ * $Id$ *********************************************************************************** * * Copyright (c) 2003, 2004, 2005, 2006, 2008 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **********************************************************************************/ package org.sakaiproject.tool.help; import lombok.extern.slf4j.Slf4j; import org.sakaiproject.api.app.help.HelpManager; import org.sakaiproject.component.cover.ServerConfigurationService; import org.sakaiproject.email.api.EmailService; import org.sakaiproject.event.cover.UsageSessionService; import org.sakaiproject.tool.cover.ToolManager; /** * question tool * @version $Id$ */ @Slf4j public class QuestionTool { private String lastName; private String firstName; private String userName; private String emailAddress; private String subject; private String content; private String toEmailAddress; private EmailService emailService; private HelpManager helpManager; /** * get help manager * @return Returns the helpManager. */ public HelpManager getHelpManager() { return helpManager; } /** * set help manager * @param helpManager The helpManager to set. */ public void setHelpManager(HelpManager helpManager) { this.helpManager = helpManager; } /** * get email address * @return Returns the emailAddress. 
*/ public String getEmailAddress() { return emailAddress; } /** * set email address * @param emailAddress The emailAddress to set. */ public void setEmailAddress(String emailAddress) { this.emailAddress = emailAddress; } /** * get first name * @return Returns the firstName. */ public String getFirstName() { return firstName; } /** * set first name * @param firstName The firstName to set. */ public void setFirstName(String firstName) { this.firstName = firstName; } /** * get last name * @return Returns the lastName. */ public String getLastName() { return lastName; } /** * set last name * @param lastName The lastName to set. */ public void setLastName(String lastName) { this.lastName = lastName; } /** * get subject * @return Returns the subject. */ public String getSubject() { return subject; } /** * set subject * @param subject The subject to set. */ public void setSubject(String subject) { this.subject = subject; } /** * get user name * @return Returns the userName. */ public String getUserName() { return userName; } /** * set user name * @param userName The userName to set. */ public void setUserName(String userName) { this.userName = userName; } /** * get email service * @return Returns the emailService. */ public EmailService getEmailService() { return emailService; } /** * set email service * @param emailService The emailService to set. */ public void setEmailService(EmailService emailService) { this.emailService = emailService; } /** * get to email address * @return Returns the toEmailAddress. */ public String getToEmailAddress() { if (toEmailAddress == null) { toEmailAddress = helpManager.getSupportEmailAddress(); } return toEmailAddress; } /** * set to email address * @param toEmailAddress The toEmailAddress to set. 
*/ public void setToEmailAddress(String toEmailAddress) { this.toEmailAddress = toEmailAddress; } /** * get detailed content * @return content */ public String getDetailedContent() { String UNAVAILABLE = "~unavailable~"; String IP = UNAVAILABLE; String agent = UNAVAILABLE; String sessionId = UNAVAILABLE; String serverName = UNAVAILABLE; if (UsageSessionService.getSession() != null) { IP = UsageSessionService.getSession().getIpAddress(); agent = UsageSessionService.getSession().getUserAgent(); sessionId = UsageSessionService.getSession().getId(); serverName = ServerConfigurationService.getServerName(); } String detailedContent = "\n\n" + "Sender's name: " + this.firstName + " " + this.lastName + "\n" + "Sender's UserName: " + userName + "\n" + "Sender's IP: " + IP + "\n" + "Sender's Browser/Agent: " + agent + "\n" + "Sender's SessionID: " + sessionId + "\n" + "Server Name: " + serverName + "\n" + "Comments or questions: \n" + this.getContent() + "\n\n" + "Sender's (reply-to) email: " + emailAddress + "\n\n" + "Site: Help Tool" + "\n" + "Site Id: " + ToolManager.getCurrentPlacement().getContext() + "\n"; return detailedContent; } /** * submit question * @return view */ public String submitQuestion() { this.sendEmail(); return "display"; } /** * reset * @return view */ public String reset() { this.content = ""; this.subject = ""; this.firstName = ""; this.lastName = ""; this.emailAddress = ""; this.userName = ""; return "main"; } /** * submit question cancel * @return */ public String submitQuestionCancel() { return this.reset(); } /** * send email */ private void sendEmail() { try { String detailedContent = getDetailedContent(); emailService.send(emailAddress, this.getToEmailAddress(), subject, detailedContent, null, null, null); } catch (Exception e) { log.error("email service is not set up correctly, can't send user question to support consultant!", e); } } /** * get content * @return Returns the content. 
*/ public String getContent() { return content; } /** * set content * @param content The content to set. */ public void setContent(String content) { this.content = content; } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.tests.integration.cluster.failover;

import java.util.HashMap;

import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.TransportConfiguration;
import org.apache.activemq.artemis.api.core.client.ClientConsumer;
import org.apache.activemq.artemis.api.core.client.ClientMessage;
import org.apache.activemq.artemis.api.core.client.ClientProducer;
import org.apache.activemq.artemis.api.core.client.ClientSession;
import org.apache.activemq.artemis.api.core.client.ServerLocator;
import org.apache.activemq.artemis.tests.integration.cluster.util.SameProcessActiveMQServer;
import org.apache.activemq.artemis.tests.integration.cluster.util.TestableServer;
import org.apache.activemq.artemis.tests.util.TransportConfigurationUtils;
import org.apache.activemq.artemis.core.client.impl.ClientSessionFactoryInternal;
import org.apache.activemq.artemis.core.config.Configuration;
import org.apache.activemq.artemis.core.server.ActiveMQServer;
import org.apache.activemq.artemis.core.server.Queue;
import org.apache.activemq.artemis.core.settings.impl.AddressSettings;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

/**
 * A PagingFailoverTest
 * <p/>
 * Integration test that fills an address beyond its page size and then
 * crashes the live server, verifying that all paged messages survive
 * failover to the backup — whether the crash happens before or after
 * half the messages have been consumed, and with or without transactions.
 * <p/>
 * TODO: validate replication failover also
 */
public class PagingFailoverTest extends FailoverTestBase {
   // Constants -----------------------------------------------------

   // Address/queue under test; paging limits come from PAGE_SIZE/PAGE_MAX
   // passed to createInVMFailoverServer below.
   private static final SimpleString ADDRESS = new SimpleString("SimpleAddress");

   private ServerLocator locator;

   private ClientSession session;

   private ClientSessionFactoryInternal sf;

   // Static --------------------------------------------------------

   // Constructors --------------------------------------------------

   // Public --------------------------------------------------------

   @Override
   @Before
   public void setUp() throws Exception {
      super.setUp();
      locator = getServerLocator();
   }

   @Override
   @After
   public void tearDown() throws Exception {
      // Register the last session with the base class so it is closed on teardown.
      addClientSession(session);
      super.tearDown();
   }

   @Test
   public void testPageFailBeforeConsume() throws Exception {
      internalTestPage(false, true);
   }

   @Test
   public void testPage() throws Exception {
      internalTestPage(false, false);
   }

   @Test
   public void testPageTransactioned() throws Exception {
      internalTestPage(true, false);
   }

   @Test
   public void testPageTransactionedFailBeforeConsume() throws Exception {
      internalTestPage(true, true);
   }

   /**
    * Core scenario: send 2000 durable messages (forcing paging), crash the
    * live server either before consuming anything or after consuming the
    * first half, then assert every message is still received in order on
    * the surviving server.
    *
    * @param transacted        use a transacted session, committing every 10 operations
    * @param failBeforeConsume crash before consuming (true) or mid-consumption (false)
    */
   public void internalTestPage(final boolean transacted, final boolean failBeforeConsume) throws Exception {
      locator.setBlockOnNonDurableSend(true);
      locator.setBlockOnDurableSend(true);
      locator.setReconnectAttempts(-1);

      sf = createSessionFactoryAndWaitForTopology(locator, 2);
      session = sf.createSession(!transacted, !transacted, 0);

      session.createQueue(PagingFailoverTest.ADDRESS, PagingFailoverTest.ADDRESS, true);

      ClientProducer prod = session.createProducer(PagingFailoverTest.ADDRESS);

      final int TOTAL_MESSAGES = 2000;

      for (int i = 0; i < TOTAL_MESSAGES; i++) {
         // In transacted mode, commit in batches of 10 sends.
         if (transacted && i % 10 == 0) {
            session.commit();
         }
         ClientMessage msg = session.createMessage(true);
         // "key" property carries the sequence number used for ordering asserts.
         msg.putIntProperty(new SimpleString("key"), i);
         prod.send(msg);
      }

      session.commit();

      if (failBeforeConsume) {
         crash(session);
         waitForBackup(null, 5);
      }

      session.close();

      session = sf.createSession(!transacted, !transacted, 0);

      session.start();

      ClientConsumer cons = session.createConsumer(PagingFailoverTest.ADDRESS);

      final int MIDDLE = TOTAL_MESSAGES / 2;

      // Consume and verify the first half.
      for (int i = 0; i < MIDDLE; i++) {
         ClientMessage msg = cons.receive(20000);
         Assert.assertNotNull(msg);
         msg.acknowledge();
         if (transacted && i % 10 == 0) {
            session.commit();
         }
         Assert.assertEquals(i, msg.getObjectProperty(new SimpleString("key")));
      }

      session.commit();

      cons.close();

      Thread.sleep(1000);

      if (!failBeforeConsume) {
         // Crash mid-consumption; the second half must still be delivered by the backup.
         crash(session);
         // failSession(session, latch);
      }

      session.close();

      session = sf.createSession(true, true, 0);

      cons = session.createConsumer(PagingFailoverTest.ADDRESS);

      session.start();

      // Consume and verify the second half after (possible) failover.
      for (int i = MIDDLE; i < TOTAL_MESSAGES; i++) {
         ClientMessage msg = cons.receive(5000);

         Assert.assertNotNull(msg);

         msg.acknowledge();
         int result = (Integer) msg.getObjectProperty(new SimpleString("key"));
         Assert.assertEquals(i, result);
      }
   }

   /**
    * Sends short-lived paged messages, crashes the live server, and
    * verifies that expiring all references on the backup eventually
    * causes the queue to leave paging mode.
    */
   @Test
   public void testExpireMessage() throws Exception {
      locator.setBlockOnNonDurableSend(true);
      locator.setBlockOnDurableSend(true);
      locator.setReconnectAttempts(-1);

      ClientSessionFactoryInternal sf = createSessionFactoryAndWaitForTopology(locator, 2);
      session = sf.createSession(true, true, 0);

      session.createQueue(PagingFailoverTest.ADDRESS, PagingFailoverTest.ADDRESS, true);

      ClientProducer prod = session.createProducer(PagingFailoverTest.ADDRESS);

      final int TOTAL_MESSAGES = 1000;

      for (int i = 0; i < TOTAL_MESSAGES; i++) {
         ClientMessage msg = session.createMessage(true);
         msg.putIntProperty(new SimpleString("key"), i);
         // Expire one second from now so the backup can reap everything.
         msg.setExpiration(System.currentTimeMillis() + 1000);
         prod.send(msg);
      }

      crash(session);

      session.close();

      Queue queue = backupServer.getServer().locateQueue(ADDRESS);

      long timeout = System.currentTimeMillis() + 60000;

      while (timeout > System.currentTimeMillis() && queue.getPageSubscription().isPaging()) {
         Thread.sleep(100);
         // Simulating what would happen on expire
         queue.expireReferences();
      }

      Assert.assertFalse(queue.getPageSubscription().isPaging());
   }

   // Package protected ---------------------------------------------

   // Protected -----------------------------------------------------

   @Override
   protected TransportConfiguration getAcceptorTransportConfiguration(final boolean live) {
      return TransportConfigurationUtils.getInVMAcceptor(live);
   }

   @Override
   protected TransportConfiguration getConnectorTransportConfiguration(final boolean live) {
      return TransportConfigurationUtils.getInVMConnector(live);
   }

   @Override
   protected ActiveMQServer createServer(final boolean realFiles, final Configuration configuration) {
      // In-VM failover server with paging enabled (PAGE_SIZE/PAGE_MAX from the base class).
      return addServer(createInVMFailoverServer(true, configuration, PAGE_SIZE, PAGE_MAX, new HashMap<String, AddressSettings>(), nodeManager, 2));
   }

   @Override
   protected TestableServer createTestableServer(Configuration config) {
      return new SameProcessActiveMQServer(createServer(true, config));
   }
}
package com.rmn.testrail.util;

import org.apache.commons.codec.binary.Base64;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;

/**
 * Thin helper around {@link HttpURLConnection} used by the TestRail client:
 * builds authenticated GET requests with retry/back-off, reads response
 * bodies into Strings, and produces equivalent curl commands for debugging.
 *
 * @author mmerrell
 */
public class HTTPUtils implements Serializable {
    public HTTPUtils() {}

    private static final int REQUEST_TIMEOUT = 1 * 60 * 1000; //minutes * seconds * milliseconds
    private static final int DEFAULT_RETRY_DELAY_MS = 500;

    // FIX: static so the non-serializable Logger no longer breaks the class's
    // Serializable contract (an instance field of a non-Serializable type would
    // throw NotSerializableException on serialization).
    private static final Logger log = LoggerFactory.getLogger(HTTPUtils.class);

    /**
     * Prepare everything about a GET request, including adding the headers, composing the URL, establishing the Connection
     * @param completeUrl The complete URL, including the apiCall, to send to the service
     * @param authentication a Base64-encoded String representing this username and password pair
     * @param headers A map of header key-value pairs to send along with the HTTP request
     * @return An active, open connection in a post-response state
     * @throws IOException occurs when submitRequestFromConnectionWithRetry throws IOException
     */
    public HttpURLConnection getHTTPRequest(String completeUrl, String authentication, Map<String, String> headers) throws IOException {
        //Build the connection, then insert the auth header
        HttpURLConnection connection = (HttpURLConnection) new URL(completeUrl).openConnection();
        connection.setRequestProperty("Authorization", "Basic " + authentication);
        log.debug("Attempting to get {}", completeUrl);

        //Add a new header for each entry in the collection
        if (headers != null) {
            for (Map.Entry<String, String> entry : headers.entrySet()) {
                log.debug("Adding header [{}: {}]", entry.getKey(), entry.getValue());
                connection.setRequestProperty(entry.getKey(), entry.getValue());
            }
        }
        return submitRequestFromConnectionWithRetry(connection, 2);
    }

    /**
     * Take a fully-baked connection and send it to the server with retry.
     * Retries on gateway timeout, 429 (honoring Retry-After when parseable)
     * and 503; aborts on any other non-200 response code.
     * @param connection A composed HTTP request containing the URL and any headers required
     * @param retries maximum number of attempts
     * @return An active, open connection in a post-response state
     * @throws IOException occurs when unable to read response code from HttpUrlConnection
     */
    private HttpURLConnection submitRequestFromConnectionWithRetry(HttpURLConnection connection, int retries) throws IOException {
        int retryDelayMs = DEFAULT_RETRY_DELAY_MS;
        connection.setReadTimeout(REQUEST_TIMEOUT);
        connection.setConnectTimeout(REQUEST_TIMEOUT);

        outer:
        for (int retry = 0; retry < retries; retry++) {
            if (retry > 0) {
                log.warn("retry " + retry + "/" + retries);
                try {
                    log.debug("Sleeping for retry: " + retryDelayMs);
                    Thread.sleep(retryDelayMs);
                    retryDelayMs = DEFAULT_RETRY_DELAY_MS; // reset to default value
                } catch (InterruptedException e) {
                    // FIX: restore the interrupt flag and stop retrying instead
                    // of silently swallowing the interruption.
                    Thread.currentThread().interrupt();
                    break;
                }
            }

            // try connect
            connection.connect();
            switch (connection.getResponseCode()) {
                case HttpURLConnection.HTTP_OK:
                    log.debug(" **OK**");
                    return connection;
                case HttpURLConnection.HTTP_GATEWAY_TIMEOUT:
                    log.warn(" **gateway timeout**");
                    break; // retry
                case 429: // 429 isn't available in any of the enums
                    log.warn(" **429**");
                    // FIX: a missing or malformed Retry-After header previously threw
                    // NumberFormatException out of this method; fall back to the default delay.
                    try {
                        retryDelayMs = Integer.parseInt(connection.getHeaderField("Retry-After")) * 1000; // seconds to ms
                    } catch (NumberFormatException nfe) {
                        retryDelayMs = DEFAULT_RETRY_DELAY_MS;
                    }
                    break; // retry
                case HttpURLConnection.HTTP_UNAVAILABLE:
                    log.warn("**unavailable**");
                    break; // retry, server is unstable
                default:
                    log.error(" **unknown response code**.");
                    break outer; // abort
            }
        }
        return connection;
    }

    /**
     * Take a fully-baked connection and send it to the server.
     * NOTE(review): currently unreferenced within this class — retained for
     * compatibility in case external callers use it reflectively; verify before removing.
     * @param connection A composed HTTP request containing the URL and any headers required
     * @return An active, open connection in a post-response state
     * @throws IOException occurs when unable to read response fields from HttpUrlConnection
     */
    private HttpURLConnection submitRequestFromConnection(HttpURLConnection connection) throws IOException {
        //Send the request
        connection.setDoOutput(true);
        connection.setReadTimeout(REQUEST_TIMEOUT);
        connection.setConnectTimeout(REQUEST_TIMEOUT);
        log.debug("Sending request...");
        connection.connect();
        log.debug("Response: {}, {}", connection.getResponseCode(), connection.getResponseMessage());
        return connection;
    }

    /**
     * Generate the string you'd need to use to re-create this API call. We're not using Curl--this just helps you debug if something goes wrong
     * @param completeUrl The complete URL, including the apiCall, to send to the service
     * @param headers A map of header key-value pairs to send along with the HTTP request
     * @return A String representing the exact curl command needed to reproduce this call outside of this method
     */
    public String getCurlCommandStringGet(String completeUrl, Map<String, String> headers) {
        //The default is "verbose", but you can remove this if you don't care about the headers
        StringBuilder curl = new StringBuilder("curl -v ");

        //If there are headers, include them here, otherwise just output the URL
        if (headers != null) {
            for (Map.Entry<String, String> entry : headers.entrySet()) {
                // FIX: separate successive -H flags with a space (they used to run together).
                curl.append(String.format("-H \"%s: %s\" ", entry.getKey(), entry.getValue()));
            }
        }
        curl.append(completeUrl);
        return curl.toString();
    }

    /**
     * Generate the string you'd need to use to re-create this API call
     * @param completeUrl The complete URL, including the apiCall, to send to the service
     * @param headers A map of header key-value pairs to send along with the HTTP request
     * @param values A list of url parameters attached to the request
     * @return A String representing the exact curl command needed to reproduce this call outside of this method
     */
    public String getCurlCommandStringPost(String completeUrl, Map<String, String> headers, List<NameValuePair> values) {
        //The default is "verbose", but you can remove this if you don't care about the headers
        StringBuilder curl = new StringBuilder("curl -v");

        //If there are headers, include them here, otherwise just output the URL
        if (headers != null) {
            for (Map.Entry<String, String> entry : headers.entrySet()) {
                curl.append(String.format(" -H \"%s: %s\"", entry.getKey(), entry.getValue()));
            }
        }
        curl.append(String.format(" -vi -X POST -d \"%s\" \"%s\"", values.toString(), completeUrl));
        return curl.toString();
    }

    /**
     * Gathers the contents of a URL Connection and concatenates everything into a String
     * @param connection A URLConnection object that presumably has a getContent() that will have some content to get
     * @return A Concatenated String of the Content contained in the URLConnection
     */
    public String getContentsFromConnection(URLConnection connection) {
        //Get the content from the connection. Since the content could be in many forms, this Java library requires us to marshall it into an InputStream
        InputStreamReader in;
        try {
            // FIX: decode explicitly as UTF-8 instead of the platform default charset.
            in = new InputStreamReader((InputStream) connection.getContent(), StandardCharsets.UTF_8);
        } catch (IOException e) {
            // FIX: preserve the original exception as the cause.
            throw new RuntimeException("Could not read contents from connection: " + e.getMessage(), e);
        }
        return getContentsFromInputStream(in);
    }

    /**
     * Returns a concatenated String of the contents of an HttpResponse
     * @param response An HttpResponse
     * @return a String containing the contents of the HttpResponse
     * @throws IOException occurs when unable to read content from response
     */
    public String getContentsFromHttpResponse(HttpResponse response) throws IOException {
        // FIX: decode explicitly as UTF-8 instead of the platform default charset.
        InputStreamReader in = new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8);
        return getContentsFromInputStream(in);
    }

    /**
     * Read every line from the reader and concatenate them, newline-terminated.
     * On a read error the partial content accumulated so far is returned.
     * @param in the reader to drain
     * @return the full textual content read so far
     */
    private String getContentsFromInputStream(InputStreamReader in) {
        BufferedReader buff = new BufferedReader(in);
        StringBuilder text = new StringBuilder();
        try {
            // FIX: the old do/while appended the final null sentinel, producing a
            // spurious trailing "null\n" on every response body.
            String line;
            while ((line = buff.readLine()) != null) {
                text.append(line).append('\n');
            }
        } catch (IOException ex) {
            // Best effort: return whatever was read before the failure (matches
            // the original method's "return what we have" behavior).
        }
        return text.toString();
    }

    /**
     * Takes a username and password, then returns a Base64-encoded String that can be sent for Basic Auth
     * @param username the Username to be encoded
     * @param password the Password to be encoded
     * @return a Base64-encoded String representing this key-value pair
     */
    public String encodeAuthenticationBase64(String username, String password) {
        // FIX: encode with an explicit charset so the result doesn't depend on
        // the JVM's platform default.
        return Base64.encodeBase64String(
            String.format("%s:%s", username, password).getBytes(StandardCharsets.UTF_8));
    }
}